diff options
Diffstat (limited to 'spec')
288 files changed, 6532 insertions, 3530 deletions
diff --git a/spec/controllers/admin/instance_statistics_controller_spec.rb b/spec/controllers/admin/usage_trends_controller_spec.rb index c589e46857f..35fb005aacb 100644 --- a/spec/controllers/admin/instance_statistics_controller_spec.rb +++ b/spec/controllers/admin/usage_trends_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Admin::InstanceStatisticsController do +RSpec.describe Admin::UsageTrendsController do let(:admin) { create(:user, :admin) } before do diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb index 68551ce4858..c9a76049e19 100644 --- a/spec/controllers/projects/blob_controller_spec.rb +++ b/spec/controllers/projects/blob_controller_spec.rb @@ -20,8 +20,8 @@ RSpec.describe Projects::BlobController do project.add_maintainer(user) sign_in(user) - stub_experiment(ci_syntax_templates: experiment_active) - stub_experiment_for_subject(ci_syntax_templates: in_experiment_group) + stub_experiment(ci_syntax_templates_b: experiment_active) + stub_experiment_for_subject(ci_syntax_templates_b: in_experiment_group) end context 'when the experiment is not active' do @@ -35,48 +35,62 @@ RSpec.describe Projects::BlobController do end end - context 'when the experiment is active and the user is in the control group' do + context 'when the experiment is active' do let(:experiment_active) { true } - let(:in_experiment_group) { false } - - it 'records the experiment user in the control group' do - expect(Experiment).to receive(:add_user) - .with(:ci_syntax_templates, :control, user, namespace_id: project.namespace_id) - request - end - end + context 'when the user is in the control group' do + let(:in_experiment_group) { false } - context 'when the experiment is active and the user is in the experimental group' do - let(:experiment_active) { true } - let(:in_experiment_group) { true } - - it 'records the experiment user in the experimental group' do - expect(Experiment).to 
receive(:add_user) - .with(:ci_syntax_templates, :experimental, user, namespace_id: project.namespace_id) + it 'records the experiment user in the control group' do + expect(Experiment).to receive(:add_user) + .with(:ci_syntax_templates_b, :control, user, namespace_id: project.namespace_id) - request + request + end end - context 'when requesting a non default config file type' do - let(:file_name) { '.non_default_ci_config' } - let(:project) { create(:project, :public, :repository, ci_config_path: file_name) } + context 'when the user is in the experimental group' do + let(:in_experiment_group) { true } it 'records the experiment user in the experimental group' do expect(Experiment).to receive(:add_user) - .with(:ci_syntax_templates, :experimental, user, namespace_id: project.namespace_id) + .with(:ci_syntax_templates_b, :experimental, user, namespace_id: project.namespace_id) request end - end - context 'when requesting a different file type' do - let(:file_name) { '.gitignore' } + context 'when requesting a non default config file type' do + let(:file_name) { '.non_default_ci_config' } + let(:project) { create(:project, :public, :repository, ci_config_path: file_name) } - it 'does not record the experiment user' do - expect(Experiment).not_to receive(:add_user) + it 'records the experiment user in the experimental group' do + expect(Experiment).to receive(:add_user) + .with(:ci_syntax_templates_b, :experimental, user, namespace_id: project.namespace_id) - request + request + end + end + + context 'when requesting a different file type' do + let(:file_name) { '.gitignore' } + + it 'does not record the experiment user' do + expect(Experiment).not_to receive(:add_user) + + request + end + end + + context 'when the group is created longer than 90 days ago' do + before do + project.namespace.update_attribute(:created_at, 91.days.ago) + end + + it 'does not record the experiment user' do + expect(Experiment).not_to receive(:add_user) + + request + end end end end diff 
--git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb index edebaf294c4..add249e2c74 100644 --- a/spec/controllers/projects/notes_controller_spec.rb +++ b/spec/controllers/projects/notes_controller_spec.rb @@ -150,7 +150,7 @@ RSpec.describe Projects::NotesController do end it 'returns an empty page of notes' do - expect(Gitlab::EtagCaching::Middleware).to receive(:skip!) + expect(Gitlab::EtagCaching::Middleware).not_to receive(:skip!) request.headers['X-Last-Fetched-At'] = microseconds(Time.zone.now) @@ -169,8 +169,6 @@ RSpec.describe Projects::NotesController do end it 'returns all notes' do - expect(Gitlab::EtagCaching::Middleware).to receive(:skip!) - get :index, params: request_params expect(json_response['notes'].count).to eq((page_1 + page_2 + page_3).size + 1) @@ -764,49 +762,9 @@ RSpec.describe Projects::NotesController do end end - context 'when the endpoint receives requests above the limit' do - before do - stub_application_setting(notes_create_limit: 3) - end - - it 'prevents from creating more notes', :request_store do - 3.times { create! } - - expect { create! } - .to change { Gitlab::GitalyClient.get_request_count }.by(0) - - create! - expect(response.body).to eq(_('This endpoint has been requested too many times. Try again later.')) - expect(response).to have_gitlab_http_status(:too_many_requests) - end - - it 'logs the event in auth.log' do - attributes = { - message: 'Application_Rate_Limiter_Request', - env: :notes_create_request_limit, - remote_ip: '0.0.0.0', - request_method: 'POST', - path: "/#{project.full_path}/notes", - user_id: user.id, - username: user.username - } - - expect(Gitlab::AuthLogger).to receive(:error).with(attributes).once - - project.add_developer(user) - sign_in(user) - - 4.times { create! 
} - end - - it 'allows user in allow-list to create notes, even if the case is different' do - user.update_attribute(:username, user.username.titleize) - stub_application_setting(notes_create_limit_allowlist: ["#{user.username.downcase}"]) - 3.times { create! } - - create! - expect(response).to have_gitlab_http_status(:found) - end + it_behaves_like 'request exceeding rate limit', :clean_gitlab_redis_cache do + let(:params) { request_params.except(:format) } + let(:request_full_path) { project_notes_path(project) } end end diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb index f9221c5a4ef..793ffbbfad9 100644 --- a/spec/controllers/projects/snippets_controller_spec.rb +++ b/spec/controllers/projects/snippets_controller_spec.rb @@ -207,14 +207,14 @@ RSpec.describe Projects::SnippetsController do subject expect(assigns(:snippet)).to eq(project_snippet) - expect(assigns(:blobs)).to eq(project_snippet.blobs) + expect(assigns(:blobs).map(&:name)).to eq(project_snippet.blobs.map(&:name)) expect(response).to have_gitlab_http_status(:ok) end it 'does not show the blobs expanded by default' do subject - expect(project_snippet.blobs.map(&:expanded?)).to be_all(false) + expect(assigns(:blobs).map(&:expanded?)).to be_all(false) end context 'when param expanded is set' do @@ -223,7 +223,7 @@ RSpec.describe Projects::SnippetsController do it 'shows all blobs expanded' do subject - expect(project_snippet.blobs.map(&:expanded?)).to be_all(true) + expect(assigns(:blobs).map(&:expanded?)).to be_all(true) end end end diff --git a/spec/controllers/projects/templates_controller_spec.rb b/spec/controllers/projects/templates_controller_spec.rb index fe282baf769..bd299efb5b5 100644 --- a/spec/controllers/projects/templates_controller_spec.rb +++ b/spec/controllers/projects/templates_controller_spec.rb @@ -160,13 +160,28 @@ RSpec.describe Projects::TemplatesController do end shared_examples 'template names request' do 
- it 'returns the template names' do - get(:names, params: { namespace_id: project.namespace, template_type: template_type, project_id: project }, format: :json) + context 'when feature flag enabled' do + it 'returns the template names', :aggregate_failures do + get(:names, params: { namespace_id: project.namespace, template_type: template_type, project_id: project }, format: :json) - expect(response).to have_gitlab_http_status(:ok) - expect(json_response.size).to eq(2) - expect(json_response.size).to eq(2) - expect(json_response.map { |x| x.slice('name') }).to match(expected_template_names) + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['Project Templates'].size).to eq(2) + expect(json_response['Project Templates'].map { |x| x.slice('name') }).to match(expected_template_names) + end + end + + context 'when feature flag disabled' do + before do + stub_feature_flags(inherited_issuable_templates: false) + end + + it 'returns the template names', :aggregate_failures do + get(:names, params: { namespace_id: project.namespace, template_type: template_type, project_id: project }, format: :json) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.size).to eq(2) + expect(json_response.map { |x| x.slice('name') }).to match(expected_template_names) + end end it 'fails for user with no access' do diff --git a/spec/controllers/projects/web_ide_schemas_controller_spec.rb b/spec/controllers/projects/web_ide_schemas_controller_spec.rb index fbec941aecc..136edd2f7ad 100644 --- a/spec/controllers/projects/web_ide_schemas_controller_spec.rb +++ b/spec/controllers/projects/web_ide_schemas_controller_spec.rb @@ -53,13 +53,13 @@ RSpec.describe Projects::WebIdeSchemasController do end context 'when an error occurs parsing the schema' do - let(:result) { { status: :error, message: 'Some error occured' } } + let(:result) { { status: :error, message: 'Some error occurred' } } it 'returns 422 with the error' do subject expect(response).to 
have_gitlab_http_status(:unprocessable_entity) - expect(response.body).to eq('{"status":"error","message":"Some error occured"}') + expect(response.body).to eq('{"status":"error","message":"Some error occurred"}') end end end diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb index 1e4ec48b119..281b69af605 100644 --- a/spec/controllers/projects_controller_spec.rb +++ b/spec/controllers/projects_controller_spec.rb @@ -221,6 +221,20 @@ RSpec.describe ProjectsController do allow(controller).to receive(:record_experiment_user) end + context 'when user can push to default branch' do + let(:user) { empty_project.owner } + + it 'creates an "view_project_show" experiment tracking event', :snowplow do + allow_next_instance_of(ApplicationExperiment) do |e| + allow(e).to receive(:should_track?).and_return(true) + end + + get :show, params: { namespace_id: empty_project.namespace, id: empty_project } + + expect_snowplow_event(category: 'empty_repo_upload', action: 'view_project_show', context: [{ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0', data: anything }]) + end + end + User.project_views.keys.each do |project_view| context "with #{project_view} view set" do before do @@ -416,7 +430,8 @@ RSpec.describe ProjectsController do path: 'foo', description: 'bar', namespace_id: user.namespace.id, - visibility_level: Gitlab::VisibilityLevel::PUBLIC + visibility_level: Gitlab::VisibilityLevel::PUBLIC, + initialize_with_readme: 1 } end @@ -425,9 +440,11 @@ RSpec.describe ProjectsController do end it 'tracks a created event for the new_project_readme experiment', :experiment do - expect(experiment(:new_project_readme)).to track(:created, property: 'blank').on_any_instance.with_context( - actor: user - ) + expect(experiment(:new_project_readme)).to track( + :created, + property: 'blank', + value: 1 + ).on_any_instance.with_context(actor: user) post :create, params: { project: project_params } end diff --git 
a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb index 85f9ea66c5f..7622f82afa7 100644 --- a/spec/controllers/root_controller_spec.rb +++ b/spec/controllers/root_controller_spec.rb @@ -123,11 +123,7 @@ RSpec.describe RootController do expect(response).to render_template 'dashboard/projects/index' end - context 'when experiment is enabled' do - before do - stub_experiment_for_subject(customize_homepage: true) - end - + context 'when customize_homepage is enabled' do it 'renders the default dashboard' do get :index @@ -135,9 +131,9 @@ RSpec.describe RootController do end end - context 'when experiment not enabled' do + context 'when customize_homepage is not enabled' do before do - stub_experiment(customize_homepage: false) + stub_feature_flags(customize_homepage: false) end it 'renders the default dashboard' do diff --git a/spec/controllers/snippets/notes_controller_spec.rb b/spec/controllers/snippets/notes_controller_spec.rb index 487635169fc..558e68fbb8f 100644 --- a/spec/controllers/snippets/notes_controller_spec.rb +++ b/spec/controllers/snippets/notes_controller_spec.rb @@ -141,6 +141,11 @@ RSpec.describe Snippets::NotesController do it 'creates the note' do expect { post :create, params: request_params }.to change { Note.count }.by(1) end + + it_behaves_like 'request exceeding rate limit', :clean_gitlab_redis_cache do + let(:params) { request_params } + let(:request_full_path) { snippet_notes_path(public_snippet) } + end end context 'when a snippet is internal' do @@ -164,6 +169,11 @@ RSpec.describe Snippets::NotesController do it 'creates the note' do expect { post :create, params: request_params }.to change { Note.count }.by(1) end + + it_behaves_like 'request exceeding rate limit', :clean_gitlab_redis_cache do + let(:params) { request_params } + let(:request_full_path) { snippet_notes_path(internal_snippet) } + end end context 'when a snippet is private' do @@ -228,6 +238,12 @@ RSpec.describe Snippets::NotesController do 
it 'creates the note' do expect { post :create, params: request_params }.to change { Note.count }.by(1) end + + it_behaves_like 'request exceeding rate limit', :clean_gitlab_redis_cache do + let(:params) { request_params } + let(:request_full_path) { snippet_notes_path(private_snippet) } + let(:user) { private_snippet.author } + end end end end diff --git a/spec/factories/analytics/instance_statistics/measurement.rb b/spec/factories/analytics/usage_trends/measurement.rb index f9398cd3061..ec80174e967 100644 --- a/spec/factories/analytics/instance_statistics/measurement.rb +++ b/spec/factories/analytics/usage_trends/measurement.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true FactoryBot.define do - factory :instance_statistics_measurement, class: 'Analytics::InstanceStatistics::Measurement' do + factory :usage_trends_measurement, class: 'Analytics::UsageTrends::Measurement' do recorded_at { Time.now } identifier { :projects } count { 1_000 } diff --git a/spec/factories/custom_emoji.rb b/spec/factories/custom_emoji.rb index ba1ae11c18d..88e50eafa7c 100644 --- a/spec/factories/custom_emoji.rb +++ b/spec/factories/custom_emoji.rb @@ -6,5 +6,6 @@ FactoryBot.define do namespace group file { 'https://gitlab.com/images/partyparrot.png' } + creator { namespace.owner } end end diff --git a/spec/factories/iteration_cadences.rb b/spec/factories/iteration_cadences.rb new file mode 100644 index 00000000000..b36f15e3dd4 --- /dev/null +++ b/spec/factories/iteration_cadences.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +FactoryBot.define do + sequence(:cadence_sequential_date) do |n| + n.days.from_now + end + + factory :iterations_cadence, class: 'Iterations::Cadence' do + title + group + start_date { generate(:cadence_sequential_date) } + end +end diff --git a/spec/factories_spec.rb b/spec/factories_spec.rb index 38ade20de28..c381fb82ba0 100644 --- a/spec/factories_spec.rb +++ b/spec/factories_spec.rb @@ -5,6 +5,41 @@ require 'spec_helper' RSpec.describe 'factories' do 
include Database::DatabaseHelpers + def skipped_traits + [ + [:alert_management_alert, :with_ended_at], + [:audit_event, :unauthenticated], + [:ci_build_trace_chunk, :fog_with_data], + [:ci_job_artifact, :remote_store], + [:ci_job_artifact, :raw], + [:ci_job_artifact, :gzip], + [:ci_job_artifact, :correct_checksum], + [:design_version, :empty], + [:environment, :non_playable], + [:go_module_commit, :files], + [:go_module_commit, :package], + [:go_module_version, :pseudo], + [:composer_cache_file, :object_storage], + [:debian_project_component_file, :object_storage], + [:debian_project_distribution, :object_storage], + [:debian_file_metadatum, :unknown], + [:package_file, :object_storage], + [:pages_domain, :without_certificate], + [:pages_domain, :without_key], + [:pages_domain, :with_missing_chain], + [:pages_domain, :with_trusted_chain], + [:pages_domain, :with_trusted_expired_chain], + [:pages_domain, :explicit_ecdsa], + [:project_member, :blocked], + [:project, :remote_mirror], + [:prometheus_alert_event, :none], + [:remote_mirror, :ssh], + [:self_managed_prometheus_alert_event, :resolved], + [:self_managed_prometheus_alert_event, :none], + [:user_preference, :only_comments] + ] + end + shared_examples 'factory' do |factory| describe "#{factory.name} factory" do it 'does not raise error when built' do @@ -16,8 +51,10 @@ RSpec.describe 'factories' do end factory.definition.defined_traits.map(&:name).each do |trait_name| - describe "linting #{trait_name} trait" do - skip 'does not raise error when created' do + describe "linting :#{trait_name} trait" do + it 'does not raise error when created' do + pending("Trait skipped linting due to legacy error") if skipped_traits.include?([factory.name, trait_name.to_sym]) + expect { create(factory.name, trait_name) }.not_to raise_error end end @@ -29,9 +66,21 @@ RSpec.describe 'factories' do # and reuse them in other factories. 
# # However, for some factories we cannot use FactoryDefault because the - # associations must be unique and cannot be reused. + # associations must be unique and cannot be reused, or the factory default + # is being mutated. skip_factory_defaults = %i[ fork_network_member + group_member + import_state + namespace + project_broken_repo + prometheus_alert + prometheus_alert_event + prometheus_metric + self_managed_prometheus_alert_event + users_star_project + wiki_page + wiki_page_meta ].to_set.freeze # Some factories and their corresponding models are based on @@ -46,9 +95,9 @@ RSpec.describe 'factories' do .partition { |factory| skip_factory_defaults.include?(factory.name) } context 'with factory defaults', factory_default: :keep do - let_it_be(:namespace) { create_default(:namespace) } - let_it_be(:project) { create_default(:project, :repository) } - let_it_be(:user) { create_default(:user) } + let_it_be(:namespace) { create_default(:namespace).freeze } + let_it_be(:project) { create_default(:project, :repository).freeze } + let_it_be(:user) { create_default(:user).freeze } before do factories_based_on_view.each do |factory| diff --git a/spec/features/commit_spec.rb b/spec/features/commit_spec.rb index 02754cc803e..80a30ab01b2 100644 --- a/spec/features/commit_spec.rb +++ b/spec/features/commit_spec.rb @@ -35,9 +35,8 @@ RSpec.describe 'Commit' do end end - context "pagination enabled" do + describe "pagination" do before do - stub_feature_flags(paginate_commit_view: true) stub_const("Projects::CommitController::COMMIT_DIFFS_PER_PAGE", 1) visit project_commit_path(project, commit) @@ -61,18 +60,5 @@ RSpec.describe 'Commit' do expect(page).to have_selector(".files ##{files[1].file_hash}") end end - - context "pagination disabled" do - before do - stub_feature_flags(paginate_commit_view: false) - - visit project_commit_path(project, commit) - end - - it "shows both diffs on the page" do - expect(page).to have_selector(".files ##{files[0].file_hash}") - 
expect(page).to have_selector(".files ##{files[1].file_hash}") - end - end end end diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb index c27d0afba6f..3b637a10abe 100644 --- a/spec/features/groups/members/manage_members_spec.rb +++ b/spec/features/groups/members/manage_members_spec.rb @@ -15,7 +15,7 @@ RSpec.describe 'Groups > Members > Manage members' do sign_in(user1) end - shared_examples 'includes the correct Invite Members link' do |should_include, should_not_include| + shared_examples 'includes the correct Invite link' do |should_include, should_not_include| it 'includes either the form or the modal trigger' do group.add_owner(user1) @@ -31,15 +31,13 @@ RSpec.describe 'Groups > Members > Manage members' do stub_feature_flags(invite_members_group_modal: true) end - it_behaves_like 'includes the correct Invite Members link', '.js-invite-members-trigger', '.invite-users-form' + it_behaves_like 'includes the correct Invite link', '.js-invite-members-trigger', '.invite-users-form' + it_behaves_like 'includes the correct Invite link', '.js-invite-group-trigger', '.invite-group-form' end context 'when Invite Members modal is disabled' do - before do - stub_feature_flags(invite_members_group_modal: false) - end - - it_behaves_like 'includes the correct Invite Members link', '.invite-users-form', '.js-invite-members-trigger' + it_behaves_like 'includes the correct Invite link', '.invite-users-form', '.js-invite-members-trigger' + it_behaves_like 'includes the correct Invite link', '.invite-group-form', '.js-invite-group-trigger' end it 'update user to owner level', :js do diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb new file mode 100644 index 00000000000..819d0c4faba --- /dev/null +++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + 
+require 'spec_helper' + +RSpec.describe 'User searches group settings', :js do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, :repository, namespace: group) } + + before do + group.add_owner(user) + sign_in(user) + end + + context 'in general settings page' do + let(:visit_path) { edit_group_path(group) } + + it_behaves_like 'can search settings with feature flag check', 'Naming', 'Permissions' + end + + context 'in Repository page' do + before do + visit group_settings_repository_path(group) + end + + it_behaves_like 'can search settings', 'Deploy tokens', 'Default initial branch name' + end + + context 'in CI/CD page' do + before do + visit group_settings_ci_cd_path(group) + end + + it_behaves_like 'can search settings', 'Variables', 'Runners' + end +end diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb index e2087868035..eb3d0f223db 100644 --- a/spec/features/issues/gfm_autocomplete_spec.rb +++ b/spec/features/issues/gfm_autocomplete_spec.rb @@ -994,7 +994,7 @@ RSpec.describe 'GFM autocomplete', :js do end def start_and_cancel_discussion - click_button('Reply...') + find_field('Reply…').click fill_in('note_note', with: 'Whoops!') diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb index c8fc23bebf9..25f2707146d 100644 --- a/spec/features/merge_request/batch_comments_spec.rb +++ b/spec/features/merge_request/batch_comments_spec.rb @@ -223,7 +223,7 @@ end def write_reply_to_discussion(button_text: 'Start a review', text: 'Line is wrong', resolve: false, unresolve: false) page.within(first('.diff-files-holder .discussion-reply-holder')) do - click_button('Reply...') + find_field('Reply…', match: :first).click fill_in('note_note', with: text) diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb index 
794dfd7c8da..163ce10132e 100644 --- a/spec/features/merge_request/user_posts_diff_notes_spec.rb +++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb @@ -192,7 +192,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do it 'adds as discussion' do should_allow_commenting(find('[id="6eb14e00385d2fb284765eb1cd8d420d33d63fc9_22_22"]'), asset_form_reset: false) expect(page).to have_css('.notes_holder .note.note-discussion', count: 1) - expect(page).to have_button('Reply...') + expect(page).to have_field('Reply…') end end end diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb index b86586d53e2..caa04059469 100644 --- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb +++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb @@ -149,7 +149,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do it 'allows user to comment' do page.within '.diff-content' do - click_button 'Reply...' + find_field('Reply…').click find(".js-unresolve-checkbox").set false find('.js-note-text').set 'testing' @@ -179,7 +179,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do it 'allows user to comment & unresolve thread' do page.within '.diff-content' do - click_button 'Reply...' + find_field('Reply…').click find('.js-note-text').set 'testing' @@ -208,7 +208,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do it 'allows user to comment & resolve thread' do page.within '.diff-content' do - click_button 'Reply...' + find_field('Reply…').click find('.js-note-text').set 'testing' @@ -442,7 +442,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do it 'allows user to comment & resolve thread' do page.within '.diff-content' do - click_button 'Reply...' 
+ find_field('Reply…').click find('.js-note-text').set 'testing' @@ -461,7 +461,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do page.within '.diff-content' do click_button 'Resolve thread' - click_button 'Reply...' + find_field('Reply…').click find('.js-note-text').set 'testing' diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb index d15d5b3bc73..90cdc28d1bd 100644 --- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb +++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb @@ -37,7 +37,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do end it 'does not render avatars after commenting on discussion tab' do - click_button 'Reply...' + find_field('Reply…').click page.within('.js-discussion-note-form') do find('.note-textarea').native.send_keys('Test comment') @@ -132,7 +132,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do end it 'adds avatar when commenting' do - click_button 'Reply...' + find_field('Reply…', match: :first).click page.within '.js-discussion-note-form' do find('.js-note-text').native.send_keys('Test') @@ -151,7 +151,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do it 'adds multiple comments' do 3.times do - click_button 'Reply...' 
+ find_field('Reply…', match: :first).click page.within '.js-discussion-note-form' do find('.js-note-text').native.send_keys('Test') diff --git a/spec/features/merge_request/user_sees_discussions_spec.rb b/spec/features/merge_request/user_sees_discussions_spec.rb index 289c861739f..d79763ba5e0 100644 --- a/spec/features/merge_request/user_sees_discussions_spec.rb +++ b/spec/features/merge_request/user_sees_discussions_spec.rb @@ -60,7 +60,7 @@ RSpec.describe 'Merge request > User sees threads', :js do it 'can be replied to' do within(".discussion[data-discussion-id='#{discussion_id}']") do - click_button 'Reply...' + find_field('Reply…').click fill_in 'note[note]', with: 'Test!' click_button 'Comment' diff --git a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb index 20c45a1d652..ea46ae06329 100644 --- a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb +++ b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb @@ -27,7 +27,7 @@ RSpec.describe 'Merge request > User sees notes from forked project', :js do expect(page).to have_content('A commit comment') page.within('.discussion-notes') do - find('.btn-text-field').click + find_field('Reply…').click scroll_to(page.find('#note_note', visible: false)) find('#note_note').send_keys('A reply comment') find('.js-comment-button').click diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb index bbeb91bbd19..dbc88d0cce2 100644 --- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb +++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb @@ -83,7 +83,7 @@ RSpec.describe 'User comments on a diff', :js do wait_for_requests - click_button 'Reply...' 
+ find_field('Reply…', match: :first).click find('.js-suggestion-btn').click diff --git a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb index ca6f03472dd..cd796d45aba 100644 --- a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb +++ b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb @@ -5,11 +5,13 @@ require 'spec_helper' RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do include Spec::Support::Helpers::Features::EditorLiteSpecHelpers + let_it_be(:namespace) { create(:namespace) } + let(:project) { create(:project, :repository, namespace: namespace) } + before do - project = create(:project, :repository) sign_in project.owner - stub_experiment(ci_syntax_templates: experiment_active) - stub_experiment_for_subject(ci_syntax_templates: in_experiment_group) + stub_experiment(ci_syntax_templates_b: experiment_active) + stub_experiment_for_subject(ci_syntax_templates_b: in_experiment_group) visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml') end @@ -23,35 +25,45 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do end end - context 'when experiment is active and the user is in the control group' do + context 'when experiment is active' do let(:experiment_active) { true } - let(:in_experiment_group) { false } - it 'does not show the "Learn CI/CD syntax" template dropdown' do - expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector') + context 'when the user is in the control group' do + let(:in_experiment_group) { false } + + it 'does not show the "Learn CI/CD syntax" template dropdown' do + expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector') + end end - end - context 'when experiment is active and the user is in the experimental group' do - let(:experiment_active) { true } - let(:in_experiment_group) { true } + context 'when the user is in the 
experimental group' do + let(:in_experiment_group) { true } + + it 'allows the user to pick a "Learn CI/CD syntax" template from the dropdown', :js do + expect(page).to have_css('.gitlab-ci-syntax-yml-selector') - it 'allows the user to pick a "Learn CI/CD syntax" template from the dropdown', :js do - expect(page).to have_css('.gitlab-ci-syntax-yml-selector') + find('.js-gitlab-ci-syntax-yml-selector').click - find('.js-gitlab-ci-syntax-yml-selector').click + wait_for_requests - wait_for_requests + within '.gitlab-ci-syntax-yml-selector' do + find('.dropdown-input-field').set('Artifacts example') + find('.dropdown-content .is-focused', text: 'Artifacts example').click + end - within '.gitlab-ci-syntax-yml-selector' do - find('.dropdown-input-field').set('Artifacts example') - find('.dropdown-content .is-focused', text: 'Artifacts example').click + wait_for_requests + + expect(page).to have_css('.gitlab-ci-syntax-yml-selector .dropdown-toggle-text', text: 'Learn CI/CD syntax') + expect(editor_get_value).to have_content('You can use artifacts to pass data to jobs in later stages.') end - wait_for_requests + context 'when the group is created longer than 90 days ago' do + let(:namespace) { create(:namespace, created_at: 91.days.ago) } - expect(page).to have_css('.gitlab-ci-syntax-yml-selector .dropdown-toggle-text', text: 'Learn CI/CD syntax') - expect(editor_get_value).to have_content('You can use artifacts to pass data to jobs in later stages.') + it 'does not show the "Learn CI/CD syntax" template dropdown' do + expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector') + end + end end end end diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb index 3649fae17ce..6156b5243de 100644 --- a/spec/features/projects/pages/user_edits_settings_spec.rb +++ b/spec/features/projects/pages/user_edits_settings_spec.rb @@ -140,7 +140,7 @@ RSpec.describe 'Pages edits pages settings', :js do before do 
allow(Projects::UpdateService).to receive(:new).and_return(service) - allow(service).to receive(:execute).and_return(status: :error, message: 'Some error has occured') + allow(service).to receive(:execute).and_return(status: :error, message: 'Some error has occurred') end it 'tries to change the setting' do @@ -150,7 +150,7 @@ RSpec.describe 'Pages edits pages settings', :js do click_button 'Save' - expect(page).to have_text('Some error has occured') + expect(page).to have_text('Some error has occurred') end end diff --git a/spec/features/projects/services/user_activates_jira_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb index 85afc54be48..3b8032e1d0d 100644 --- a/spec/features/projects/services/user_activates_jira_spec.rb +++ b/spec/features/projects/services/user_activates_jira_spec.rb @@ -6,12 +6,14 @@ RSpec.describe 'User activates Jira', :js do include_context 'project service activation' include_context 'project service Jira context' + before do + server_info = { key: 'value' }.to_json + stub_request(:get, test_url).to_return(body: server_info) + end + describe 'user tests Jira Service' do context 'when Jira connection test succeeds' do before do - server_info = { key: 'value' }.to_json - stub_request(:get, test_url).with(basic_auth: %w(username password)).to_return(body: server_info) - visit_project_integration('Jira') fill_form click_test_then_save_integration(expect_test_to_fail: false) @@ -81,4 +83,40 @@ RSpec.describe 'User activates Jira', :js do end end end + + describe 'issue transition settings' do + it 'shows validation errors' do + visit_project_integration('Jira') + + expect(page).to have_field('Move to Done', checked: true) + + fill_form + choose 'Use custom transitions' + click_save_integration + + within '[data-testid="issue-transition-settings"]' do + expect(page).to have_content('This field is required.') + end + + fill_in 'service[jira_issue_transition_id]', with: '1, 2, 3' + click_save_integration + + expect(page).to 
have_content('Jira settings saved and active.') + expect(project.reload.jira_service.jira_issue_transition_id).to eq('1, 2, 3') + end + + it 'clears the transition IDs when using automatic transitions' do + create(:jira_service, project: project, jira_issue_transition_id: '1, 2, 3') + visit_project_integration('Jira') + + expect(page).to have_field('Use custom transitions', checked: true) + expect(page).to have_field('service[jira_issue_transition_id]', with: '1, 2, 3') + + choose 'Move to Done' + click_save_integration + + expect(page).to have_content('Jira settings saved and active.') + expect(project.reload.jira_service.jira_issue_transition_id).to eq('') + end + end end diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb index d31913d2dcf..50451075db5 100644 --- a/spec/features/projects/settings/service_desk_setting_spec.rb +++ b/spec/features/projects/settings/service_desk_setting_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Service Desk Setting', :js do +RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache do let(:project) { create(:project_empty_repo, :private, service_desk_enabled: false) } let(:presenter) { project.present(current_user: user) } let(:user) { create(:user) } @@ -66,5 +66,48 @@ RSpec.describe 'Service Desk Setting', :js do expect(find('[data-testid="incoming-email"]').value).to eq('address-suffix@example.com') end + + context 'issue description templates' do + let_it_be(:issuable_project_template_files) do + { + '.gitlab/issue_templates/project-issue-bar.md' => 'Project Issue Template Bar', + '.gitlab/issue_templates/project-issue-foo.md' => 'Project Issue Template Foo' + } + end + + let_it_be(:issuable_group_template_files) do + { + '.gitlab/issue_templates/group-issue-bar.md' => 'Group Issue Template Bar', + '.gitlab/issue_templates/group-issue-foo.md' => 'Group Issue Template Foo' + } + end + + 
let_it_be_with_reload(:group) { create(:group)} + let_it_be_with_reload(:project) { create(:project, :custom_repo, group: group, files: issuable_project_template_files) } + let_it_be(:group_template_repo) { create(:project, :custom_repo, group: group, files: issuable_group_template_files) } + + before do + stub_licensed_features(custom_file_templates_for_namespace: false, custom_file_templates: false) + group.update_columns(file_template_project_id: group_template_repo.id) + end + + context 'when inherited_issuable_templates enabled' do + before do + stub_feature_flags(inherited_issuable_templates: true) + visit edit_project_path(project) + end + + it_behaves_like 'issue description templates from current project only' + end + + context 'when inherited_issuable_templates disabled' do + before do + stub_feature_flags(inherited_issuable_templates: false) + visit edit_project_path(project) + end + + it_behaves_like 'issue description templates from current project only' + end + end end end diff --git a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb index e8e32d93f7b..397c334a2b8 100644 --- a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb +++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb @@ -133,7 +133,7 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do it 'when unchecked sets :remove_source_branch_after_merge to false' do uncheck('project_remove_source_branch_after_merge') within('.merge-request-settings-form') do - find('.qa-save-merge-request-changes') + find('.rspec-save-merge-request-changes') click_on('Save changes') end @@ -157,7 +157,7 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do choose('project_project_setting_attributes_squash_option_default_on') within('.merge-request-settings-form') do - 
find('.qa-save-merge-request-changes') + find('.rspec-save-merge-request-changes') click_on('Save changes') end @@ -172,7 +172,7 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do choose('project_project_setting_attributes_squash_option_always') within('.merge-request-settings-form') do - find('.qa-save-merge-request-changes') + find('.rspec-save-merge-request-changes') click_on('Save changes') end @@ -187,7 +187,7 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do choose('project_project_setting_attributes_squash_option_never') within('.merge-request-settings-form') do - find('.qa-save-merge-request-changes') + find('.rspec-save-merge-request-changes') click_on('Save changes') end diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb new file mode 100644 index 00000000000..563dff615d6 --- /dev/null +++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'User searches project settings', :js do + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project, :repository, namespace: user.namespace) } + + before do + sign_in(user) + end + + context 'in general settings page' do + let(:visit_path) { edit_project_path(project) } + + it_behaves_like 'can search settings with feature flag check', 'Naming', 'Visibility' + end + + context 'in Repository page' do + before do + visit project_settings_repository_path(project) + end + + it_behaves_like 'can search settings', 'Deploy keys', 'Mirroring repositories' + end + + context 'in CI/CD page' do + before do + visit project_settings_ci_cd_path(project) + end + + it_behaves_like 'can search settings', 'General pipelines', 'Auto DevOps' + end + + context 'in Operations page' do + before do + visit project_settings_operations_path(project) + end + + 
it_behaves_like 'can search settings', 'Alerts', 'Incidents' + end +end diff --git a/spec/features/projects/show/user_uploads_files_spec.rb b/spec/features/projects/show/user_uploads_files_spec.rb index 053598a528e..b7c5d324d93 100644 --- a/spec/features/projects/show/user_uploads_files_spec.rb +++ b/spec/features/projects/show/user_uploads_files_spec.rb @@ -33,4 +33,34 @@ RSpec.describe 'Projects > Show > User uploads files' do include_examples 'it uploads and commit a new file to a forked project' end + + context 'with an empty repo' do + let(:project) { create(:project, :empty_repo, creator: user) } + + context 'when in the empty_repo_upload experiment' do + before do + stub_experiments(empty_repo_upload: :candidate) + + visit(project_path(project)) + end + + it 'uploads and commits a new text file', :js do + click_link('Upload file') + + drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt')) + + page.within('#modal-upload-blob') do + fill_in(:commit_message, with: 'New commit message') + end + + click_button('Upload file') + + wait_for_requests + + expect(page).to have_content('New commit message') + expect(page).to have_content('Lorem ipsum dolor sit amet') + expect(page).to have_content('Sed ut perspiciatis unde omnis') + end + end + end end diff --git a/spec/features/sentry_js_spec.rb b/spec/features/sentry_js_spec.rb index aa0ad17340a..1d277ba7b3c 100644 --- a/spec/features/sentry_js_spec.rb +++ b/spec/features/sentry_js_spec.rb @@ -12,7 +12,7 @@ RSpec.describe 'Sentry' do expect(has_requested_sentry).to eq(false) end - xit 'loads sentry if sentry is enabled' do + it 'loads sentry if sentry is enabled' do stub_sentry_settings visit new_user_session_path diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb index 31e29810c65..900cd72c17f 100644 --- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb +++ 
b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb @@ -23,7 +23,7 @@ RSpec.describe 'User uploads avatar to profile' do expect(user.reload.avatar.file).to exist end - it 'their new avatar is immediately visible in the header', :js do + it 'their new avatar is immediately visible in the header and setting sidebar', :js do find('.js-user-avatar-input', visible: false).set(avatar_file_path) click_button 'Set new profile picture' @@ -33,5 +33,6 @@ RSpec.describe 'User uploads avatar to profile' do data_uri = find('.avatar-image .avatar')['src'] expect(page.find('.header-user-avatar')['src']).to eq data_uri + expect(page.find('[data-testid="sidebar-user-avatar"]')['src']).to eq data_uri end end diff --git a/spec/features/user_can_display_performance_bar_spec.rb b/spec/features/user_can_display_performance_bar_spec.rb index 9c67523f88f..b8f41925156 100644 --- a/spec/features/user_can_display_performance_bar_spec.rb +++ b/spec/features/user_can_display_performance_bar_spec.rb @@ -49,6 +49,10 @@ RSpec.describe 'User can display performance bar', :js do let(:group) { create(:group) } + before do + allow(GitlabPerformanceBarStatsWorker).to receive(:perform_in) + end + context 'when user is logged-out' do before do visit root_path @@ -97,6 +101,26 @@ RSpec.describe 'User can display performance bar', :js do it_behaves_like 'performance bar is enabled by default in development' it_behaves_like 'performance bar can be displayed' + + it 'does not show Stats link by default' do + find('body').native.send_keys('pb') + + expect(page).not_to have_link('Stats', visible: :all) + end + + context 'when GITLAB_PERFORMANCE_BAR_STATS_URL environment variable is set' do + let(:stats_url) { 'https://log.gprd.gitlab.net/app/dashboards#/view/' } + + before do + stub_env('GITLAB_PERFORMANCE_BAR_STATS_URL', stats_url) + end + + it 'shows Stats link' do + find('body').native.send_keys('pb') + + expect(page).to have_link('Stats', href: stats_url, visible: :all) + end + end end end end 
diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb index 445482a5a96..d6daf73aba2 100644 --- a/spec/finders/packages/group_packages_finder_spec.rb +++ b/spec/finders/packages/group_packages_finder_spec.rb @@ -122,7 +122,7 @@ RSpec.describe Packages::GroupPackagesFinder do end context 'when there are processing packages' do - let_it_be(:package4) { create(:nuget_package, project: project, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) } + let_it_be(:package4) { create(:nuget_package, project: project, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) } it { is_expected.to match_array([package1, package2]) } end diff --git a/spec/finders/packages/npm/package_finder_spec.rb b/spec/finders/packages/npm/package_finder_spec.rb index 78c23971f92..f021d800f31 100644 --- a/spec/finders/packages/npm/package_finder_spec.rb +++ b/spec/finders/packages/npm/package_finder_spec.rb @@ -2,39 +2,139 @@ require 'spec_helper' RSpec.describe ::Packages::Npm::PackageFinder do - let(:package) { create(:npm_package) } + let_it_be_with_reload(:project) { create(:project)} + let_it_be(:package) { create(:npm_package, project: project) } + let(:project) { package.project } let(:package_name) { package.name } - describe '#execute!' 
do - subject { described_class.new(project, package_name).execute } + shared_examples 'accepting a namespace for' do |example_name| + before do + project.update!(namespace: namespace) + end + + context 'that is a group' do + let_it_be(:namespace) { create(:group) } + + it_behaves_like example_name + + context 'within another group' do + let_it_be(:subgroup) { create(:group, parent: namespace) } + + before do + project.update!(namespace: subgroup) + end + + it_behaves_like example_name + end + end + + context 'that is a user namespace' do + let_it_be(:user) { create(:user) } + let_it_be(:namespace) { user.namespace } + + it_behaves_like example_name + end + end + + describe '#execute' do + shared_examples 'finding packages by name' do + it { is_expected.to eq([package]) } + + context 'with unknown package name' do + let(:package_name) { 'baz' } + + it { is_expected.to be_empty } + end + end + + subject { finder.execute } + + context 'with a project' do + let(:finder) { described_class.new(package_name, project: project) } - it { is_expected.to eq([package]) } + it_behaves_like 'finding packages by name' - context 'with unknown package name' do - let(:package_name) { 'baz' } + context 'set to nil' do + let(:project) { nil } - it { is_expected.to be_empty } + it { is_expected.to be_empty } + end end - context 'with nil project' do - let(:project) { nil } + context 'with a namespace' do + let(:finder) { described_class.new(package_name, namespace: namespace) } + + it_behaves_like 'accepting a namespace for', 'finding packages by name' + + context 'set to nil' do + let_it_be(:namespace) { nil } - it { is_expected.to be_empty } + it { is_expected.to be_empty } + end end end describe '#find_by_version' do let(:version) { package.version } - subject { described_class.new(project, package.name).find_by_version(version) } + subject { finder.find_by_version(version) } + + shared_examples 'finding packages by version' do + it { is_expected.to eq(package) } + + context 'with 
unknown version' do + let(:version) { 'foobar' } + + it { is_expected.to be_nil } + end + end + + context 'with a project' do + let(:finder) { described_class.new(package_name, project: project) } + + it_behaves_like 'finding packages by version' + end + + context 'with a namespace' do + let(:finder) { described_class.new(package_name, namespace: namespace) } + + it_behaves_like 'accepting a namespace for', 'finding packages by version' + end + end + + describe '#last' do + subject { finder.last } + + shared_examples 'finding package by last' do + it { is_expected.to eq(package) } + end + + context 'with a project' do + let(:finder) { described_class.new(package_name, project: project) } + + it_behaves_like 'finding package by last' + end + + context 'with a namespace' do + let(:finder) { described_class.new(package_name, namespace: namespace) } + + it_behaves_like 'accepting a namespace for', 'finding package by last' - it { is_expected.to eq(package) } + context 'with duplicate packages' do + let_it_be(:namespace) { create(:group) } + let_it_be(:subgroup1) { create(:group, parent: namespace) } + let_it_be(:subgroup2) { create(:group, parent: namespace) } + let_it_be(:project2) { create(:project, namespace: subgroup2) } + let_it_be(:package2) { create(:npm_package, name: package.name, project: project2) } - context 'with unknown version' do - let(:version) { 'foobar' } + before do + project.update!(namespace: subgroup1) + end - it { is_expected.to be_nil } + # the most recent one is returned + it { is_expected.to eq(package2) } + end end end end diff --git a/spec/finders/packages/package_finder_spec.rb b/spec/finders/packages/package_finder_spec.rb index ef07e7575d1..e8c7404a612 100644 --- a/spec/finders/packages/package_finder_spec.rb +++ b/spec/finders/packages/package_finder_spec.rb @@ -14,7 +14,7 @@ RSpec.describe ::Packages::PackageFinder do it { is_expected.to eq(maven_package) } context 'processing packages' do - let_it_be(:nuget_package) { 
create(:nuget_package, project: project, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) } + let_it_be(:nuget_package) { create(:nuget_package, project: project, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) } let(:package_id) { nuget_package.id } it 'are not returned' do diff --git a/spec/finders/packages/packages_finder_spec.rb b/spec/finders/packages/packages_finder_spec.rb index 6e92616bafa..0add77a8478 100644 --- a/spec/finders/packages/packages_finder_spec.rb +++ b/spec/finders/packages/packages_finder_spec.rb @@ -76,7 +76,7 @@ RSpec.describe ::Packages::PackagesFinder do end context 'with processing packages' do - let_it_be(:nuget_package) { create(:nuget_package, project: project, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) } + let_it_be(:nuget_package) { create(:nuget_package, project: project, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) } it { is_expected.to match_array([conan_package, maven_package]) } end diff --git a/spec/frontend/__helpers__/vue_test_utils_helper.js b/spec/frontend/__helpers__/vue_test_utils_helper.js index ffccfb249c2..d6132ef84ac 100644 --- a/spec/frontend/__helpers__/vue_test_utils_helper.js +++ b/spec/frontend/__helpers__/vue_test_utils_helper.js @@ -45,9 +45,16 @@ export const extendedWrapper = (wrapper) => { return wrapper; } - return Object.defineProperty(wrapper, 'findByTestId', { - value(id) { - return this.find(`[data-testid="${id}"]`); + return Object.defineProperties(wrapper, { + findByTestId: { + value(id) { + return this.find(`[data-testid="${id}"]`); + }, + }, + findAllByTestId: { + value(id) { + return this.findAll(`[data-testid="${id}"]`); + }, }, }); }; diff --git a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js index 31c4ccd5dbb..d4f8e36c169 100644 --- a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js +++ b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js @@ -88,5 +88,22 @@ 
describe('Vue test utils helpers', () => { expect(mockComponent.findByTestId(testId).exists()).toBe(true); }); }); + + describe('findAllByTestId', () => { + const testId = 'a-component'; + let mockComponent; + + beforeEach(() => { + mockComponent = extendedWrapper( + shallowMount({ + template: `<div><div data-testid="${testId}"></div><div data-testid="${testId}"></div></div>`, + }), + ); + }); + + it('should find all components by test id', () => { + expect(mockComponent.findAllByTestId(testId)).toHaveLength(2); + }); + }); }); }); diff --git a/spec/frontend/admin/users/tabs_spec.js b/spec/frontend/admin/users/tabs_spec.js new file mode 100644 index 00000000000..39ba8618486 --- /dev/null +++ b/spec/frontend/admin/users/tabs_spec.js @@ -0,0 +1,37 @@ +import initTabs from '~/admin/users/tabs'; +import Api from '~/api'; + +jest.mock('~/api.js'); +jest.mock('~/lib/utils/common_utils'); + +describe('tabs', () => { + beforeEach(() => { + setFixtures(` + <div> + <div class="js-users-tab-item"> + <a href="#users" data-testid='users-tab'>Users</a> + </div> + <div class="js-users-tab-item"> + <a href="#cohorts" data-testid='cohorts-tab'>Cohorts</a> + </div> + </div>`); + + initTabs(); + }); + + afterEach(() => {}); + + describe('tracking', () => { + it('tracks event when cohorts tab is clicked', () => { + document.querySelector('[data-testid="cohorts-tab"]').click(); + + expect(Api.trackRedisHllUserEvent).toHaveBeenCalledWith('i_analytics_cohorts'); + }); + + it('does not track an event when users tab is clicked', () => { + document.querySelector('[data-testid="users-tab"]').click(); + + expect(Api.trackRedisHllUserEvent).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/frontend/analytics/instance_statistics/components/app_spec.js b/spec/frontend/analytics/instance_statistics/components/app_spec.js deleted file mode 100644 index b945cc20bd6..00000000000 --- a/spec/frontend/analytics/instance_statistics/components/app_spec.js +++ /dev/null @@ -1,45 +0,0 @@ -import { 
shallowMount } from '@vue/test-utils'; -import InstanceCounts from '~/analytics/instance_statistics/components//instance_counts.vue'; -import InstanceStatisticsApp from '~/analytics/instance_statistics/components/app.vue'; -import InstanceStatisticsCountChart from '~/analytics/instance_statistics/components/instance_statistics_count_chart.vue'; -import ProjectsAndGroupsChart from '~/analytics/instance_statistics/components/projects_and_groups_chart.vue'; -import UsersChart from '~/analytics/instance_statistics/components/users_chart.vue'; - -describe('InstanceStatisticsApp', () => { - let wrapper; - - const createComponent = () => { - wrapper = shallowMount(InstanceStatisticsApp); - }; - - beforeEach(() => { - createComponent(); - }); - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - }); - - it('displays the instance counts component', () => { - expect(wrapper.find(InstanceCounts).exists()).toBe(true); - }); - - ['Pipelines', 'Issues & Merge Requests'].forEach((instance) => { - it(`displays the ${instance} chart`, () => { - const chartTitles = wrapper - .findAll(InstanceStatisticsCountChart) - .wrappers.map((chartComponent) => chartComponent.props('chartTitle')); - - expect(chartTitles).toContain(instance); - }); - }); - - it('displays the users chart component', () => { - expect(wrapper.find(UsersChart).exists()).toBe(true); - }); - - it('displays the projects and groups chart component', () => { - expect(wrapper.find(ProjectsAndGroupsChart).exists()).toBe(true); - }); -}); diff --git a/spec/frontend/analytics/instance_statistics/apollo_mock_data.js b/spec/frontend/analytics/usage_trends/apollo_mock_data.js index 98eabd577ee..98eabd577ee 100644 --- a/spec/frontend/analytics/instance_statistics/apollo_mock_data.js +++ b/spec/frontend/analytics/usage_trends/apollo_mock_data.js diff --git a/spec/frontend/analytics/instance_statistics/components/__snapshots__/instance_statistics_count_chart_spec.js.snap 
b/spec/frontend/analytics/usage_trends/components/__snapshots__/usage_trends_count_chart_spec.js.snap index 29bcd5f223b..65de69c2692 100644 --- a/spec/frontend/analytics/instance_statistics/components/__snapshots__/instance_statistics_count_chart_spec.js.snap +++ b/spec/frontend/analytics/usage_trends/components/__snapshots__/usage_trends_count_chart_spec.js.snap @@ -1,6 +1,6 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`InstanceStatisticsCountChart when fetching more data when the fetchMore query returns data passes the data to the line chart 1`] = ` +exports[`UsageTrendsCountChart when fetching more data when the fetchMore query returns data passes the data to the line chart 1`] = ` Array [ Object { "data": Array [ @@ -22,7 +22,7 @@ Array [ ] `; -exports[`InstanceStatisticsCountChart with data passes the data to the line chart 1`] = ` +exports[`UsageTrendsCountChart with data passes the data to the line chart 1`] = ` Array [ Object { "data": Array [ diff --git a/spec/frontend/analytics/usage_trends/components/app_spec.js b/spec/frontend/analytics/usage_trends/components/app_spec.js new file mode 100644 index 00000000000..8f1dd3c445c --- /dev/null +++ b/spec/frontend/analytics/usage_trends/components/app_spec.js @@ -0,0 +1,45 @@ +import { shallowMount } from '@vue/test-utils'; +import UsageTrendsApp from '~/analytics/usage_trends/components/app.vue'; +import ProjectsAndGroupsChart from '~/analytics/usage_trends/components/projects_and_groups_chart.vue'; +import UsageCounts from '~/analytics/usage_trends/components/usage_counts.vue'; +import UsageTrendsCountChart from '~/analytics/usage_trends/components/usage_trends_count_chart.vue'; +import UsersChart from '~/analytics/usage_trends/components/users_chart.vue'; + +describe('UsageTrendsApp', () => { + let wrapper; + + const createComponent = () => { + wrapper = shallowMount(UsageTrendsApp); + }; + + beforeEach(() => { + createComponent(); + }); + + afterEach(() => { + wrapper.destroy(); + wrapper = null; 
+ }); + + it('displays the usage counts component', () => { + expect(wrapper.find(UsageCounts).exists()).toBe(true); + }); + + ['Pipelines', 'Issues & Merge Requests'].forEach((usage) => { + it(`displays the ${usage} chart`, () => { + const chartTitles = wrapper + .findAll(UsageTrendsCountChart) + .wrappers.map((chartComponent) => chartComponent.props('chartTitle')); + + expect(chartTitles).toContain(usage); + }); + }); + + it('displays the users chart component', () => { + expect(wrapper.find(UsersChart).exists()).toBe(true); + }); + + it('displays the projects and groups chart component', () => { + expect(wrapper.find(ProjectsAndGroupsChart).exists()).toBe(true); + }); +}); diff --git a/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js b/spec/frontend/analytics/usage_trends/components/instance_counts_spec.js index 12b5e14b9c4..707d2cc310f 100644 --- a/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js +++ b/spec/frontend/analytics/usage_trends/components/instance_counts_spec.js @@ -1,9 +1,9 @@ import { shallowMount } from '@vue/test-utils'; -import InstanceCounts from '~/analytics/instance_statistics/components/instance_counts.vue'; import MetricCard from '~/analytics/shared/components/metric_card.vue'; -import { mockInstanceCounts } from '../mock_data'; +import UsageCounts from '~/analytics/usage_trends/components/usage_counts.vue'; +import { mockUsageCounts } from '../mock_data'; -describe('InstanceCounts', () => { +describe('UsageCounts', () => { let wrapper; const createComponent = ({ loading = false, data = {} } = {}) => { @@ -15,7 +15,7 @@ describe('InstanceCounts', () => { }, }; - wrapper = shallowMount(InstanceCounts, { + wrapper = shallowMount(UsageCounts, { mocks: { $apollo }, data() { return { @@ -44,11 +44,11 @@ describe('InstanceCounts', () => { describe('with data', () => { beforeEach(() => { - createComponent({ data: { counts: mockInstanceCounts } }); + createComponent({ data: { counts: 
mockUsageCounts } }); }); it('passes the counts data to the metric card', () => { - expect(findMetricCard().props('metrics')).toEqual(mockInstanceCounts); + expect(findMetricCard().props('metrics')).toEqual(mockUsageCounts); }); }); }); diff --git a/spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js b/spec/frontend/analytics/usage_trends/components/projects_and_groups_chart_spec.js index bbfc65f19b1..09215da32e0 100644 --- a/spec/frontend/analytics/instance_statistics/components/projects_and_groups_chart_spec.js +++ b/spec/frontend/analytics/usage_trends/components/projects_and_groups_chart_spec.js @@ -3,9 +3,9 @@ import { GlLineChart } from '@gitlab/ui/dist/charts'; import { createLocalVue, shallowMount } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; -import ProjectsAndGroupChart from '~/analytics/instance_statistics/components/projects_and_groups_chart.vue'; -import groupsQuery from '~/analytics/instance_statistics/graphql/queries/groups.query.graphql'; -import projectsQuery from '~/analytics/instance_statistics/graphql/queries/projects.query.graphql'; +import ProjectsAndGroupChart from '~/analytics/usage_trends/components/projects_and_groups_chart.vue'; +import groupsQuery from '~/analytics/usage_trends/graphql/queries/groups.query.graphql'; +import projectsQuery from '~/analytics/usage_trends/graphql/queries/projects.query.graphql'; import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue'; import { mockQueryResponse } from '../apollo_mock_data'; import { mockCountsData2, roundedSortedCountsMonthlyChartData2 } from '../mock_data'; diff --git a/spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js b/spec/frontend/analytics/usage_trends/components/usage_trends_count_chart_spec.js index e80dcdff426..7c2df3fe8c4 100644 --- 
a/spec/frontend/analytics/instance_statistics/components/instance_statistics_count_chart_spec.js +++ b/spec/frontend/analytics/usage_trends/components/usage_trends_count_chart_spec.js @@ -3,8 +3,8 @@ import { GlLineChart } from '@gitlab/ui/dist/charts'; import { createLocalVue, shallowMount } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; -import InstanceStatisticsCountChart from '~/analytics/instance_statistics/components/instance_statistics_count_chart.vue'; -import statsQuery from '~/analytics/instance_statistics/graphql/queries/instance_count.query.graphql'; +import UsageTrendsCountChart from '~/analytics/usage_trends/components/usage_trends_count_chart.vue'; +import statsQuery from '~/analytics/usage_trends/graphql/queries/usage_count.query.graphql'; import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue'; import { mockQueryResponse, mockApolloResponse } from '../apollo_mock_data'; import { mockCountsData1 } from '../mock_data'; @@ -15,7 +15,7 @@ localVue.use(VueApollo); const loadChartErrorMessage = 'My load error message'; const noDataMessage = 'My no data message'; -const queryResponseDataKey = 'instanceStatisticsMeasurements'; +const queryResponseDataKey = 'usageTrendsMeasurements'; const identifier = 'MOCK_QUERY'; const mockQueryConfig = { identifier, @@ -33,12 +33,12 @@ const mockChartConfig = { queries: [mockQueryConfig], }; -describe('InstanceStatisticsCountChart', () => { +describe('UsageTrendsCountChart', () => { let wrapper; let queryHandler; const createComponent = ({ responseHandler }) => { - return shallowMount(InstanceStatisticsCountChart, { + return shallowMount(UsageTrendsCountChart, { localVue, apolloProvider: createMockApollo([[statsQuery, responseHandler]]), propsData: { ...mockChartConfig }, diff --git a/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js 
b/spec/frontend/analytics/usage_trends/components/users_chart_spec.js index d857b7fae61..6adfcca11ac 100644 --- a/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js +++ b/spec/frontend/analytics/usage_trends/components/users_chart_spec.js @@ -3,8 +3,8 @@ import { GlAreaChart } from '@gitlab/ui/dist/charts'; import { createLocalVue, shallowMount } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; -import UsersChart from '~/analytics/instance_statistics/components/users_chart.vue'; -import usersQuery from '~/analytics/instance_statistics/graphql/queries/users.query.graphql'; +import UsersChart from '~/analytics/usage_trends/components/users_chart.vue'; +import usersQuery from '~/analytics/usage_trends/graphql/queries/users.query.graphql'; import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue'; import { mockQueryResponse } from '../apollo_mock_data'; import { diff --git a/spec/frontend/analytics/instance_statistics/mock_data.js b/spec/frontend/analytics/usage_trends/mock_data.js index e86e552a952..d96dfa26209 100644 --- a/spec/frontend/analytics/instance_statistics/mock_data.js +++ b/spec/frontend/analytics/usage_trends/mock_data.js @@ -1,4 +1,4 @@ -export const mockInstanceCounts = [ +export const mockUsageCounts = [ { key: 'projects', value: 10, label: 'Projects' }, { key: 'groups', value: 20, label: 'Group' }, ]; diff --git a/spec/frontend/analytics/instance_statistics/utils_spec.js b/spec/frontend/analytics/usage_trends/utils_spec.js index 3fd89c7f740..656f310dda7 100644 --- a/spec/frontend/analytics/instance_statistics/utils_spec.js +++ b/spec/frontend/analytics/usage_trends/utils_spec.js @@ -2,7 +2,7 @@ import { getAverageByMonth, getEarliestDate, generateDataKeys, -} from '~/analytics/instance_statistics/utils'; +} from '~/analytics/usage_trends/utils'; import { mockCountsData1, mockCountsData2, diff --git 
a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js index d2522a0124a..d6e1b170dd3 100644 --- a/spec/frontend/api_spec.js +++ b/spec/frontend/api_spec.js @@ -482,6 +482,30 @@ describe('Api', () => { }); }); + describe('projectShareWithGroup', () => { + it('invites a group to share access with the authenticated project', () => { + const projectId = 1; + const sharedGroupId = 99; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/share`; + const options = { + group_id: sharedGroupId, + group_access: 10, + expires_at: undefined, + }; + + jest.spyOn(axios, 'post'); + + mock.onPost(expectedUrl).reply(200, { + status: 'success', + }); + + return Api.projectShareWithGroup(projectId, options).then(({ data }) => { + expect(data.status).toBe('success'); + expect(axios.post).toHaveBeenCalledWith(expectedUrl, options); + }); + }); + }); + describe('projectMilestones', () => { it('fetches project milestones', (done) => { const projectId = 1; @@ -638,6 +662,30 @@ describe('Api', () => { }); }); + describe('groupShareWithGroup', () => { + it('invites a group to share access with the authenticated group', () => { + const groupId = 1; + const sharedGroupId = 99; + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/share`; + const options = { + group_id: sharedGroupId, + group_access: 10, + expires_at: undefined, + }; + + jest.spyOn(axios, 'post'); + + mock.onPost(expectedUrl).reply(200, { + status: 'success', + }); + + return Api.groupShareWithGroup(groupId, options).then(({ data }) => { + expect(data.status).toBe('success'); + expect(axios.post).toHaveBeenCalledWith(expectedUrl, options); + }); + }); + }); + describe('commit', () => { const projectId = 'user/project'; const sha = 'abcd0123'; diff --git a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap index 
bfe7e40fb32..bf33aa731ef 100644 --- a/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap +++ b/spec/frontend/artifacts_settings/components/__snapshots__/keep_latest_artifact_checkbox_spec.js.snap @@ -1,37 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`Keep latest artifact checkbox when application keep latest artifact setting is disabled checkbox is disabled when application setting is disabled 1`] = ` -<div> - <!----> - - <b-form-checkbox-stub - checked="true" - class="gl-form-checkbox" - disabled="true" - plain="true" - value="true" - > - <strong - class="gl-mr-3" - > - Keep artifacts from most recent successful jobs - </strong> - - <gl-link-stub - href="/help/ci/pipelines/job_artifacts" - > - More information - </gl-link-stub> - - <p - class="help-text" - > - This feature is disabled at the instance level. - </p> - </b-form-checkbox-stub> -</div> -`; - exports[`Keep latest artifact checkbox when application keep latest artifact setting is enabled sets correct setting value in checkbox with query result 1`] = ` <div> <!----> diff --git a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js index fe2886d6c95..b0d1b70c198 100644 --- a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js +++ b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js @@ -120,23 +120,4 @@ describe('Keep latest artifact checkbox', () => { expect(findCheckbox().attributes('disabled')).toBeUndefined(); }); }); - - describe('when application keep latest artifact setting is disabled', () => { - it('checkbox is disabled when application setting is disabled', async () => { - createComponent({ - keepLatestArtifactApplicationQueryHandler: jest.fn().mockResolvedValue({ - data: { - ciApplicationSettings: { - keepLatestArtifact: false, - }, - }, - }), - }); - - await 
wrapper.vm.$nextTick(); - - expect(wrapper.element).toMatchSnapshot(); - expect(findCheckbox().attributes('disabled')).toBe('true'); - }); - }); }); diff --git a/spec/frontend/blob/blob_file_dropzone_spec.js b/spec/frontend/blob/blob_file_dropzone_spec.js index cbd36abd4ff..95520577e9a 100644 --- a/spec/frontend/blob/blob_file_dropzone_spec.js +++ b/spec/frontend/blob/blob_file_dropzone_spec.js @@ -1,5 +1,10 @@ import $ from 'jquery'; import BlobFileDropzone from '~/blob/blob_file_dropzone'; +import { trackUploadFileFormSubmitted } from '~/projects/upload_file_experiment'; + +jest.mock('~/projects/upload_file_experiment', () => ({ + trackUploadFileFormSubmitted: jest.fn(), +})); describe('BlobFileDropzone', () => { preloadFixtures('blob/show.html'); @@ -41,5 +46,13 @@ describe('BlobFileDropzone', () => { expect(replaceFileButton.is(':disabled')).toEqual(true); expect(dropzone.processQueue).toHaveBeenCalled(); }); + + it('calls the tracking event', () => { + jest.spyOn(window, 'alert').mockImplementation(() => {}); + + replaceFileButton.click(); + + expect(trackUploadFileFormSubmitted).toHaveBeenCalled(); + }); }); }); diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js index 7ed20f20882..ca9aa673359 100644 --- a/spec/frontend/boards/board_list_spec.js +++ b/spec/frontend/boards/board_list_spec.js @@ -1,8 +1,9 @@ -import { createLocalVue, mount } from '@vue/test-utils'; +import { createLocalVue, shallowMount } from '@vue/test-utils'; import Vuex from 'vuex'; import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame'; import BoardCard from '~/boards/components/board_card.vue'; import BoardList from '~/boards/components/board_list.vue'; +import BoardNewIssue from '~/boards/components/board_new_issue.vue'; import eventHub from '~/boards/eventhub'; import defaultState from '~/boards/stores/state'; import { mockList, mockIssuesByListId, issues, mockIssues } from './mock_data'; @@ -11,7 +12,7 @@ const 
localVue = createLocalVue(); localVue.use(Vuex); const actions = { - fetchIssuesForList: jest.fn(), + fetchItemsForList: jest.fn(), }; const createStore = (state = defaultState) => { @@ -28,8 +29,8 @@ const createComponent = ({ state = {}, } = {}) => { const store = createStore({ - issuesByListId: mockIssuesByListId, - issues, + boardItemsByListId: mockIssuesByListId, + boardItems: issues, pageInfoByListId: { 'gid://gitlab/List/1': { hasNextPage: true }, 'gid://gitlab/List/2': {}, @@ -38,6 +39,7 @@ const createComponent = ({ 'gid://gitlab/List/1': {}, 'gid://gitlab/List/2': {}, }, + selectedBoardItems: [], ...state, }); @@ -58,12 +60,12 @@ const createComponent = ({ list.issuesCount = 1; } - const component = mount(BoardList, { + const component = shallowMount(BoardList, { localVue, propsData: { disabled: false, list, - issues: [issue], + boardItems: [issue], canAdminList: true, ...componentProps, }, @@ -74,6 +76,10 @@ const createComponent = ({ weightFeatureAvailable: false, boardWeight: null, }, + stubs: { + BoardCard, + BoardNewIssue, + }, }); return component; @@ -81,7 +87,10 @@ const createComponent = ({ describe('Board list component', () => { let wrapper; + const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`); + const findIssueCountLoadingIcon = () => wrapper.find('[data-testid="count-loading-icon"]'); + useFakeRequestAnimationFrame(); afterEach(() => { @@ -170,7 +179,7 @@ describe('Board list component', () => { it('loads more issues after scrolling', () => { wrapper.vm.listRef.dispatchEvent(new Event('scroll')); - expect(actions.fetchIssuesForList).toHaveBeenCalled(); + expect(actions.fetchItemsForList).toHaveBeenCalled(); }); it('does not load issues if already loading', () => { @@ -179,7 +188,7 @@ describe('Board list component', () => { }); wrapper.vm.listRef.dispatchEvent(new Event('scroll')); - expect(actions.fetchIssuesForList).not.toHaveBeenCalled(); + expect(actions.fetchItemsForList).not.toHaveBeenCalled(); }); it('shows 
loading more spinner', async () => { @@ -189,7 +198,8 @@ describe('Board list component', () => { wrapper.vm.showCount = true; await wrapper.vm.$nextTick(); - expect(wrapper.find('.board-list-count .gl-spinner').exists()).toBe(true); + + expect(findIssueCountLoadingIcon().exists()).toBe(true); }); }); diff --git a/spec/frontend/boards/components/board_card_deprecated_spec.js b/spec/frontend/boards/components/board_card_deprecated_spec.js new file mode 100644 index 00000000000..6be84f6f111 --- /dev/null +++ b/spec/frontend/boards/components/board_card_deprecated_spec.js @@ -0,0 +1,219 @@ +/* global List */ +/* global ListAssignee */ +/* global ListLabel */ + +import { mount } from '@vue/test-utils'; + +import MockAdapter from 'axios-mock-adapter'; +import waitForPromises from 'helpers/wait_for_promises'; +import BoardCardDeprecated from '~/boards/components/board_card_deprecated.vue'; +import issueCardInner from '~/boards/components/issue_card_inner.vue'; +import eventHub from '~/boards/eventhub'; +import store from '~/boards/stores'; +import boardsStore from '~/boards/stores/boards_store'; +import axios from '~/lib/utils/axios_utils'; + +import sidebarEventHub from '~/sidebar/event_hub'; +import '~/boards/models/label'; +import '~/boards/models/assignee'; +import '~/boards/models/list'; +import userAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; +import { listObj, boardsMockInterceptor, setMockEndpoints } from '../mock_data'; + +describe('BoardCard', () => { + let wrapper; + let mock; + let list; + + const findIssueCardInner = () => wrapper.find(issueCardInner); + const findUserAvatarLink = () => wrapper.find(userAvatarLink); + + // this particular mount component needs to be used after the root beforeEach because it depends on list being initialized + const mountComponent = (propsData) => { + wrapper = mount(BoardCardDeprecated, { + stubs: { + issueCardInner, + }, + store, + propsData: { + list, + issue: list.issues[0], + disabled: 
false, + index: 0, + ...propsData, + }, + provide: { + groupId: null, + rootPath: '/', + scopedLabelsAvailable: false, + }, + }); + }; + + const setupData = async () => { + list = new List(listObj); + boardsStore.create(); + boardsStore.detail.issue = {}; + const label1 = new ListLabel({ + id: 3, + title: 'testing 123', + color: '#000cff', + text_color: 'white', + description: 'test', + }); + await waitForPromises(); + + list.issues[0].labels.push(label1); + }; + + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onAny().reply(boardsMockInterceptor); + setMockEndpoints(); + return setupData(); + }); + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + list = null; + mock.restore(); + }); + + it('when details issue is empty does not show the element', () => { + mountComponent(); + expect(wrapper.find('[data-testid="board_card"').classes()).not.toContain('is-active'); + }); + + it('when detailIssue is equal to card issue shows the element', () => { + [boardsStore.detail.issue] = list.issues; + mountComponent(); + + expect(wrapper.classes()).toContain('is-active'); + }); + + it('when multiSelect does not contain issue removes multi select class', () => { + mountComponent(); + expect(wrapper.classes()).not.toContain('multi-select'); + }); + + it('when multiSelect contain issue add multi select class', () => { + boardsStore.multiSelect.list = [list.issues[0]]; + mountComponent(); + + expect(wrapper.classes()).toContain('multi-select'); + }); + + it('adds user-can-drag class if not disabled', () => { + mountComponent(); + expect(wrapper.classes()).toContain('user-can-drag'); + }); + + it('does not add user-can-drag class disabled', () => { + mountComponent({ disabled: true }); + + expect(wrapper.classes()).not.toContain('user-can-drag'); + }); + + it('does not add disabled class', () => { + mountComponent(); + expect(wrapper.classes()).not.toContain('is-disabled'); + }); + + it('adds disabled class is disabled is true', () => { + mountComponent({ 
disabled: true }); + + expect(wrapper.classes()).toContain('is-disabled'); + }); + + describe('mouse events', () => { + it('does not set detail issue if showDetail is false', () => { + mountComponent(); + expect(boardsStore.detail.issue).toEqual({}); + }); + + it('does not set detail issue if link is clicked', () => { + mountComponent(); + findIssueCardInner().find('a').trigger('mouseup'); + + expect(boardsStore.detail.issue).toEqual({}); + }); + + it('does not set detail issue if img is clicked', () => { + mountComponent({ + issue: { + ...list.issues[0], + assignees: [ + new ListAssignee({ + id: 1, + name: 'testing 123', + username: 'test', + avatar: 'test_image', + }), + ], + }, + }); + + findUserAvatarLink().trigger('mouseup'); + + expect(boardsStore.detail.issue).toEqual({}); + }); + + it('does not set detail issue if showDetail is false after mouseup', () => { + mountComponent(); + wrapper.trigger('mouseup'); + + expect(boardsStore.detail.issue).toEqual({}); + }); + + it('sets detail issue to card issue on mouse up', () => { + jest.spyOn(eventHub, '$emit').mockImplementation(() => {}); + + mountComponent(); + + wrapper.trigger('mousedown'); + wrapper.trigger('mouseup'); + + expect(eventHub.$emit).toHaveBeenCalledWith('newDetailIssue', wrapper.vm.issue, false); + expect(boardsStore.detail.list).toEqual(wrapper.vm.list); + }); + + it('resets detail issue to empty if already set', () => { + jest.spyOn(eventHub, '$emit').mockImplementation(() => {}); + const [issue] = list.issues; + boardsStore.detail.issue = issue; + mountComponent(); + + wrapper.trigger('mousedown'); + wrapper.trigger('mouseup'); + + expect(eventHub.$emit).toHaveBeenCalledWith('clearDetailIssue', false); + }); + }); + + describe('sidebarHub events', () => { + it('closes all sidebars before showing an issue if no issues are opened', () => { + jest.spyOn(sidebarEventHub, '$emit').mockImplementation(() => {}); + boardsStore.detail.issue = {}; + mountComponent(); + + // sets conditional so that 
event is emitted. + wrapper.trigger('mousedown'); + + wrapper.trigger('mouseup'); + + expect(sidebarEventHub.$emit).toHaveBeenCalledWith('sidebar.closeAll'); + }); + + it('it does not closes all sidebars before showing an issue if an issue is opened', () => { + jest.spyOn(sidebarEventHub, '$emit').mockImplementation(() => {}); + const [issue] = list.issues; + boardsStore.detail.issue = issue; + mountComponent(); + + wrapper.trigger('mousedown'); + + expect(sidebarEventHub.$emit).not.toHaveBeenCalledWith('sidebar.closeAll'); + }); + }); +}); diff --git a/spec/frontend/boards/components/board_card_layout_spec.js b/spec/frontend/boards/components/board_card_layout_spec.js deleted file mode 100644 index 3fa8714807c..00000000000 --- a/spec/frontend/boards/components/board_card_layout_spec.js +++ /dev/null @@ -1,116 +0,0 @@ -import { createLocalVue, shallowMount } from '@vue/test-utils'; -import Vuex from 'vuex'; - -import BoardCardLayout from '~/boards/components/board_card_layout.vue'; -import IssueCardInner from '~/boards/components/issue_card_inner.vue'; -import { ISSUABLE } from '~/boards/constants'; -import defaultState from '~/boards/stores/state'; -import { mockLabelList, mockIssue } from '../mock_data'; - -describe('Board card layout', () => { - let wrapper; - let store; - - const localVue = createLocalVue(); - localVue.use(Vuex); - - const createStore = ({ getters = {}, actions = {} } = {}) => { - store = new Vuex.Store({ - state: defaultState, - actions, - getters, - }); - }; - - // this particular mount component needs to be used after the root beforeEach because it depends on list being initialized - const mountComponent = ({ propsData = {}, provide = {} } = {}) => { - wrapper = shallowMount(BoardCardLayout, { - localVue, - stubs: { - IssueCardInner, - }, - store, - propsData: { - list: mockLabelList, - issue: mockIssue, - disabled: false, - index: 0, - ...propsData, - }, - provide: { - groupId: null, - rootPath: '/', - scopedLabelsAvailable: false, - 
...provide, - }, - }); - }; - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - }); - - describe('mouse events', () => { - it('sets showDetail to true on mousedown', async () => { - createStore(); - mountComponent(); - - wrapper.trigger('mousedown'); - await wrapper.vm.$nextTick(); - - expect(wrapper.vm.showDetail).toBe(true); - }); - - it('sets showDetail to false on mousemove', async () => { - createStore(); - mountComponent(); - wrapper.trigger('mousedown'); - await wrapper.vm.$nextTick(); - expect(wrapper.vm.showDetail).toBe(true); - wrapper.trigger('mousemove'); - await wrapper.vm.$nextTick(); - expect(wrapper.vm.showDetail).toBe(false); - }); - - it("calls 'setActiveId'", async () => { - const setActiveId = jest.fn(); - createStore({ - actions: { - setActiveId, - }, - }); - mountComponent(); - - wrapper.trigger('mouseup'); - await wrapper.vm.$nextTick(); - - expect(setActiveId).toHaveBeenCalledTimes(1); - expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), { - id: mockIssue.id, - sidebarType: ISSUABLE, - }); - }); - - it("calls 'setActiveId' when epic swimlanes is active", async () => { - const setActiveId = jest.fn(); - const isSwimlanesOn = () => true; - createStore({ - getters: { isSwimlanesOn }, - actions: { - setActiveId, - }, - }); - mountComponent(); - - wrapper.trigger('mouseup'); - await wrapper.vm.$nextTick(); - - expect(setActiveId).toHaveBeenCalledTimes(1); - expect(setActiveId).toHaveBeenCalledWith(expect.any(Object), { - id: mockIssue.id, - sidebarType: ISSUABLE, - }); - }); - }); -}); diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js index 5f26ae1bb3b..d68e0f9fc23 100644 --- a/spec/frontend/boards/components/board_card_spec.js +++ b/spec/frontend/boards/components/board_card_spec.js @@ -1,43 +1,49 @@ -/* global List */ -/* global ListAssignee */ -/* global ListLabel */ +import { createLocalVue, shallowMount } from '@vue/test-utils'; +import Vuex from 'vuex'; 
-import { mount } from '@vue/test-utils'; - -import MockAdapter from 'axios-mock-adapter'; -import waitForPromises from 'helpers/wait_for_promises'; import BoardCard from '~/boards/components/board_card.vue'; -import issueCardInner from '~/boards/components/issue_card_inner.vue'; -import eventHub from '~/boards/eventhub'; -import store from '~/boards/stores'; -import boardsStore from '~/boards/stores/boards_store'; -import axios from '~/lib/utils/axios_utils'; - -import sidebarEventHub from '~/sidebar/event_hub'; -import '~/boards/models/label'; -import '~/boards/models/assignee'; -import '~/boards/models/list'; -import userAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; -import { listObj, boardsMockInterceptor, setMockEndpoints } from '../mock_data'; - -describe('BoardCard', () => { - let wrapper; - let mock; - let list; +import IssueCardInner from '~/boards/components/issue_card_inner.vue'; +import { inactiveId } from '~/boards/constants'; +import { mockLabelList, mockIssue } from '../mock_data'; - const findIssueCardInner = () => wrapper.find(issueCardInner); - const findUserAvatarLink = () => wrapper.find(userAvatarLink); +describe('Board card layout', () => { + let wrapper; + let store; + let mockActions; + + const localVue = createLocalVue(); + localVue.use(Vuex); + + const createStore = ({ initialState = {}, isSwimlanesOn = false } = {}) => { + mockActions = { + toggleBoardItem: jest.fn(), + toggleBoardItemMultiSelection: jest.fn(), + }; + + store = new Vuex.Store({ + state: { + activeId: inactiveId, + selectedBoardItems: [], + ...initialState, + }, + actions: mockActions, + getters: { + isSwimlanesOn: () => isSwimlanesOn, + }, + }); + }; // this particular mount component needs to be used after the root beforeEach because it depends on list being initialized - const mountComponent = (propsData) => { - wrapper = mount(BoardCard, { + const mountComponent = ({ propsData = {}, provide = {} } = {}) => { + wrapper = 
shallowMount(BoardCard, { + localVue, stubs: { - issueCardInner, + IssueCardInner, }, store, propsData: { - list, - issue: list.issues[0], + list: mockLabelList, + issue: mockIssue, disabled: false, index: 0, ...propsData, @@ -46,174 +52,94 @@ describe('BoardCard', () => { groupId: null, rootPath: '/', scopedLabelsAvailable: false, + ...provide, }, }); }; - const setupData = async () => { - list = new List(listObj); - boardsStore.create(); - boardsStore.detail.issue = {}; - const label1 = new ListLabel({ - id: 3, - title: 'testing 123', - color: '#000cff', - text_color: 'white', - description: 'test', - }); - await waitForPromises(); - - list.issues[0].labels.push(label1); + const selectCard = async () => { + wrapper.trigger('mouseup'); + await wrapper.vm.$nextTick(); }; - beforeEach(() => { - mock = new MockAdapter(axios); - mock.onAny().reply(boardsMockInterceptor); - setMockEndpoints(); - return setupData(); - }); + const multiSelectCard = async () => { + wrapper.trigger('mouseup', { ctrlKey: true }); + await wrapper.vm.$nextTick(); + }; afterEach(() => { wrapper.destroy(); wrapper = null; - list = null; - mock.restore(); - }); - - it('when details issue is empty does not show the element', () => { - mountComponent(); - expect(wrapper.find('[data-testid="board_card"').classes()).not.toContain('is-active'); - }); - - it('when detailIssue is equal to card issue shows the element', () => { - [boardsStore.detail.issue] = list.issues; - mountComponent(); - - expect(wrapper.classes()).toContain('is-active'); - }); - - it('when multiSelect does not contain issue removes multi select class', () => { - mountComponent(); - expect(wrapper.classes()).not.toContain('multi-select'); - }); - - it('when multiSelect contain issue add multi select class', () => { - boardsStore.multiSelect.list = [list.issues[0]]; - mountComponent(); - - expect(wrapper.classes()).toContain('multi-select'); - }); - - it('adds user-can-drag class if not disabled', () => { - mountComponent(); - 
expect(wrapper.classes()).toContain('user-can-drag'); - }); - - it('does not add user-can-drag class disabled', () => { - mountComponent({ disabled: true }); - - expect(wrapper.classes()).not.toContain('user-can-drag'); - }); - - it('does not add disabled class', () => { - mountComponent(); - expect(wrapper.classes()).not.toContain('is-disabled'); + store = null; }); - it('adds disabled class is disabled is true', () => { - mountComponent({ disabled: true }); - - expect(wrapper.classes()).toContain('is-disabled'); - }); - - describe('mouse events', () => { - it('does not set detail issue if showDetail is false', () => { + describe.each` + isSwimlanesOn + ${true} | ${false} + `('when isSwimlanesOn is $isSwimlanesOn', ({ isSwimlanesOn }) => { + it('should not highlight the card by default', async () => { + createStore({ isSwimlanesOn }); mountComponent(); - expect(boardsStore.detail.issue).toEqual({}); - }); - it('does not set detail issue if link is clicked', () => { - mountComponent(); - findIssueCardInner().find('a').trigger('mouseup'); - - expect(boardsStore.detail.issue).toEqual({}); + expect(wrapper.classes()).not.toContain('is-active'); + expect(wrapper.classes()).not.toContain('multi-select'); }); - it('does not set detail issue if img is clicked', () => { - mountComponent({ - issue: { - ...list.issues[0], - assignees: [ - new ListAssignee({ - id: 1, - name: 'testing 123', - username: 'test', - avatar: 'test_image', - }), - ], + it('should highlight the card with a correct style when selected', async () => { + createStore({ + initialState: { + activeId: mockIssue.id, }, + isSwimlanesOn, }); - - findUserAvatarLink().trigger('mouseup'); - - expect(boardsStore.detail.issue).toEqual({}); - }); - - it('does not set detail issue if showDetail is false after mouseup', () => { - mountComponent(); - wrapper.trigger('mouseup'); - - expect(boardsStore.detail.issue).toEqual({}); - }); - - it('sets detail issue to card issue on mouse up', () => { - jest.spyOn(eventHub, 
'$emit').mockImplementation(() => {}); - mountComponent(); - wrapper.trigger('mousedown'); - wrapper.trigger('mouseup'); - - expect(eventHub.$emit).toHaveBeenCalledWith('newDetailIssue', wrapper.vm.issue, false); - expect(boardsStore.detail.list).toEqual(wrapper.vm.list); + expect(wrapper.classes()).toContain('is-active'); + expect(wrapper.classes()).not.toContain('multi-select'); }); - it('resets detail issue to empty if already set', () => { - jest.spyOn(eventHub, '$emit').mockImplementation(() => {}); - const [issue] = list.issues; - boardsStore.detail.issue = issue; + it('should highlight the card with a correct style when multi-selected', async () => { + createStore({ + initialState: { + activeId: inactiveId, + selectedBoardItems: [mockIssue], + }, + isSwimlanesOn, + }); mountComponent(); - wrapper.trigger('mousedown'); - wrapper.trigger('mouseup'); - - expect(eventHub.$emit).toHaveBeenCalledWith('clearDetailIssue', false); + expect(wrapper.classes()).toContain('multi-select'); + expect(wrapper.classes()).not.toContain('is-active'); }); - }); - - describe('sidebarHub events', () => { - it('closes all sidebars before showing an issue if no issues are opened', () => { - jest.spyOn(sidebarEventHub, '$emit').mockImplementation(() => {}); - boardsStore.detail.issue = {}; - mountComponent(); - - // sets conditional so that event is emitted. 
- wrapper.trigger('mousedown'); - wrapper.trigger('mouseup'); + describe('when mouseup event is called on the issue card', () => { + beforeEach(() => { + createStore({ isSwimlanesOn }); + mountComponent(); + }); - expect(sidebarEventHub.$emit).toHaveBeenCalledWith('sidebar.closeAll'); - }); + describe('when not using multi-select', () => { + it('should call vuex action "toggleBoardItem" with correct parameters', async () => { + await selectCard(); - it('it does not closes all sidebars before showing an issue if an issue is opened', () => { - jest.spyOn(sidebarEventHub, '$emit').mockImplementation(() => {}); - const [issue] = list.issues; - boardsStore.detail.issue = issue; - mountComponent(); + expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(1); + expect(mockActions.toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), { + boardItem: mockIssue, + }); + }); + }); - wrapper.trigger('mousedown'); + describe('when using multi-select', () => { + it('should call vuex action "multiSelectBoardItem" with correct parameters', async () => { + await multiSelectCard(); - expect(sidebarEventHub.$emit).not.toHaveBeenCalledWith('sidebar.closeAll'); + expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledTimes(1); + expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledWith( + expect.any(Object), + mockIssue, + ); + }); + }); }); }); }); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_due_date_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_due_date_spec.js index 7838b5a0b2f..8fd178a0856 100644 --- a/spec/frontend/boards/components/sidebar/board_sidebar_due_date_spec.js +++ b/spec/frontend/boards/components/sidebar/board_sidebar_due_date_spec.js @@ -24,7 +24,7 @@ describe('~/boards/components/sidebar/board_sidebar_due_date.vue', () => { const createWrapper = ({ dueDate = null } = {}) => { store = createStore(); - store.state.issues = { [TEST_ISSUE.id]: { ...TEST_ISSUE, dueDate } }; + store.state.boardItems = { 
[TEST_ISSUE.id]: { ...TEST_ISSUE, dueDate } }; store.state.activeId = TEST_ISSUE.id; wrapper = shallowMount(BoardSidebarDueDate, { @@ -61,7 +61,7 @@ describe('~/boards/components/sidebar/board_sidebar_due_date.vue', () => { createWrapper(); jest.spyOn(wrapper.vm, 'setActiveIssueDueDate').mockImplementation(() => { - store.state.issues[TEST_ISSUE.id].dueDate = TEST_DUE_DATE; + store.state.boardItems[TEST_ISSUE.id].dueDate = TEST_DUE_DATE; }); findDatePicker().vm.$emit('input', TEST_PARSED_DATE); await wrapper.vm.$nextTick(); @@ -86,7 +86,7 @@ describe('~/boards/components/sidebar/board_sidebar_due_date.vue', () => { createWrapper(); jest.spyOn(wrapper.vm, 'setActiveIssueDueDate').mockImplementation(() => { - store.state.issues[TEST_ISSUE.id].dueDate = null; + store.state.boardItems[TEST_ISSUE.id].dueDate = null; }); findDatePicker().vm.$emit('clear'); await wrapper.vm.$nextTick(); @@ -104,7 +104,7 @@ describe('~/boards/components/sidebar/board_sidebar_due_date.vue', () => { createWrapper({ dueDate: TEST_DUE_DATE }); jest.spyOn(wrapper.vm, 'setActiveIssueDueDate').mockImplementation(() => { - store.state.issues[TEST_ISSUE.id].dueDate = null; + store.state.boardItems[TEST_ISSUE.id].dueDate = null; }); findResetButton().vm.$emit('click'); await wrapper.vm.$nextTick(); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js index bc7df1c76c6..723d0345f76 100644 --- a/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js +++ b/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js @@ -34,7 +34,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => { const createWrapper = (issue = TEST_ISSUE_A) => { store = createStore(); - store.state.issues = { [issue.id]: { ...issue } }; + store.state.boardItems = { [issue.id]: { ...issue } }; store.dispatch('setActiveId', { id: issue.id }); wrapper = 
shallowMount(BoardSidebarIssueTitle, { @@ -74,7 +74,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => { createWrapper(); jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => { - store.state.issues[TEST_ISSUE_A.id].title = TEST_TITLE; + store.state.boardItems[TEST_ISSUE_A.id].title = TEST_TITLE; }); findFormInput().vm.$emit('input', TEST_TITLE); findForm().vm.$emit('submit', { preventDefault: () => {} }); @@ -147,7 +147,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => { createWrapper(TEST_ISSUE_B); jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => { - store.state.issues[TEST_ISSUE_B.id].title = TEST_TITLE; + store.state.boardItems[TEST_ISSUE_B.id].title = TEST_TITLE; }); findFormInput().vm.$emit('input', TEST_TITLE); findCancelButton().vm.$emit('click'); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js index 12b873ba7d8..98ac211238c 100644 --- a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js +++ b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js @@ -25,7 +25,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => { const createWrapper = ({ labels = [] } = {}) => { store = createStore(); - store.state.issues = { [TEST_ISSUE.id]: { ...TEST_ISSUE, labels } }; + store.state.boardItems = { [TEST_ISSUE.id]: { ...TEST_ISSUE, labels } }; store.state.activeId = TEST_ISSUE.id; wrapper = shallowMount(BoardSidebarLabelsSelect, { @@ -66,7 +66,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => { jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => TEST_LABELS); findLabelsSelect().vm.$emit('updateSelectedLabels', TEST_LABELS_PAYLOAD); - store.state.issues[TEST_ISSUE.id].labels = TEST_LABELS; + 
store.state.boardItems[TEST_ISSUE.id].labels = TEST_LABELS; await wrapper.vm.$nextTick(); }); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js index 8820ec7ae63..8706424a296 100644 --- a/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js +++ b/spec/frontend/boards/components/sidebar/board_sidebar_milestone_select_spec.js @@ -22,7 +22,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () => const createWrapper = ({ milestone = null, loading = false } = {}) => { store = createStore(); - store.state.issues = { [TEST_ISSUE.id]: { ...TEST_ISSUE, milestone } }; + store.state.boardItems = { [TEST_ISSUE.id]: { ...TEST_ISSUE, milestone } }; store.state.activeId = TEST_ISSUE.id; wrapper = shallowMount(BoardSidebarMilestoneSelect, { @@ -113,7 +113,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () => createWrapper(); jest.spyOn(wrapper.vm, 'setActiveIssueMilestone').mockImplementation(() => { - store.state.issues[TEST_ISSUE.id].milestone = TEST_MILESTONE; + store.state.boardItems[TEST_ISSUE.id].milestone = TEST_MILESTONE; }); findDropdownItem().vm.$emit('click'); await wrapper.vm.$nextTick(); @@ -137,7 +137,7 @@ describe('~/boards/components/sidebar/board_sidebar_milestone_select.vue', () => createWrapper({ milestone: TEST_MILESTONE }); jest.spyOn(wrapper.vm, 'setActiveIssueMilestone').mockImplementation(() => { - store.state.issues[TEST_ISSUE.id].milestone = null; + store.state.boardItems[TEST_ISSUE.id].milestone = null; }); findUnsetMilestoneItem().vm.$emit('click'); await wrapper.vm.$nextTick(); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js index 3e6b0be0267..d2d10563ae5 100644 --- 
a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js +++ b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js @@ -22,7 +22,7 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () = const createComponent = (activeIssue = { ...mockActiveIssue }) => { store = createStore(); - store.state.issues = { [activeIssue.id]: activeIssue }; + store.state.boardItems = { [activeIssue.id]: activeIssue }; store.state.activeId = activeIssue.id; wrapper = mount(BoardSidebarSubscription, { diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js index 32d0e7ae886..80d98c5eb6b 100644 --- a/spec/frontend/boards/stores/actions_spec.js +++ b/spec/frontend/boards/stores/actions_spec.js @@ -5,7 +5,7 @@ import { formatBoardLists, formatIssueInput, } from '~/boards/boards_util'; -import { inactiveId } from '~/boards/constants'; +import { inactiveId, ISSUABLE } from '~/boards/constants'; import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql'; import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql'; import issueMoveListMutation from '~/boards/graphql/issue_move_list.mutation.graphql'; @@ -112,6 +112,15 @@ describe('setActiveId', () => { }); describe('fetchLists', () => { + it('should dispatch fetchIssueLists action', () => { + testAction({ + action: actions.fetchLists, + expectedActions: [{ type: 'fetchIssueLists' }], + }); + }); +}); + +describe('fetchIssueLists', () => { const state = { fullPath: 'gitlab-org', boardId: '1', @@ -138,7 +147,7 @@ describe('fetchLists', () => { jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse); testAction( - actions.fetchLists, + actions.fetchIssueLists, {}, state, [ @@ -152,6 +161,23 @@ describe('fetchLists', () => { ); }); + it('should commit mutations RECEIVE_BOARD_LISTS_FAILURE on failure', (done) => { + jest.spyOn(gqlClient, 
'query').mockResolvedValue(Promise.reject()); + + testAction( + actions.fetchIssueLists, + {}, + state, + [ + { + type: types.RECEIVE_BOARD_LISTS_FAILURE, + }, + ], + [], + done, + ); + }); + it('dispatch createList action when backlog list does not exist and is not hidden', (done) => { queryResponse = { data: { @@ -168,7 +194,7 @@ describe('fetchLists', () => { jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse); testAction( - actions.fetchLists, + actions.fetchIssueLists, {}, state, [ @@ -490,7 +516,7 @@ describe('removeList', () => { }); }); -describe('fetchIssuesForList', () => { +describe('fetchItemsForList', () => { const listId = mockLists[0].id; const state = { @@ -533,21 +559,21 @@ describe('fetchIssuesForList', () => { [listId]: pageInfo, }; - it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_SUCCESS on success', (done) => { + it('should commit mutations REQUEST_ITEMS_FOR_LIST and RECEIVE_ITEMS_FOR_LIST_SUCCESS on success', (done) => { jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse); testAction( - actions.fetchIssuesForList, + actions.fetchItemsForList, { listId }, state, [ { - type: types.REQUEST_ISSUES_FOR_LIST, + type: types.REQUEST_ITEMS_FOR_LIST, payload: { listId, fetchNext: false }, }, { - type: types.RECEIVE_ISSUES_FOR_LIST_SUCCESS, - payload: { listIssues: formattedIssues, listPageInfo, listId }, + type: types.RECEIVE_ITEMS_FOR_LIST_SUCCESS, + payload: { listItems: formattedIssues, listPageInfo, listId }, }, ], [], @@ -555,19 +581,19 @@ describe('fetchIssuesForList', () => { ); }); - it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_FAILURE on failure', (done) => { + it('should commit mutations REQUEST_ITEMS_FOR_LIST and RECEIVE_ITEMS_FOR_LIST_FAILURE on failure', (done) => { jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject()); testAction( - actions.fetchIssuesForList, + actions.fetchItemsForList, { listId }, state, [ { - type: 
types.REQUEST_ISSUES_FOR_LIST, + type: types.REQUEST_ITEMS_FOR_LIST, payload: { listId, fetchNext: false }, }, - { type: types.RECEIVE_ISSUES_FOR_LIST_FAILURE, payload: listId }, + { type: types.RECEIVE_ITEMS_FOR_LIST_FAILURE, payload: listId }, ], [], done, @@ -598,8 +624,8 @@ describe('moveIssue', () => { boardType: 'group', disabled: false, boardLists: mockLists, - issuesByListId: listIssues, - issues, + boardItemsByListId: listIssues, + boardItems: issues, }; it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_SUCCESS mutation when successful', (done) => { @@ -879,7 +905,7 @@ describe('addListIssue', () => { }); describe('setActiveIssueLabels', () => { - const state = { issues: { [mockIssue.id]: mockIssue } }; + const state = { boardItems: { [mockIssue.id]: mockIssue } }; const getters = { activeIssue: mockIssue }; const testLabelIds = labels.map((label) => label.id); const input = { @@ -924,7 +950,7 @@ describe('setActiveIssueLabels', () => { }); describe('setActiveIssueDueDate', () => { - const state = { issues: { [mockIssue.id]: mockIssue } }; + const state = { boardItems: { [mockIssue.id]: mockIssue } }; const getters = { activeIssue: mockIssue }; const testDueDate = '2020-02-20'; const input = { @@ -975,7 +1001,7 @@ describe('setActiveIssueDueDate', () => { }); describe('setActiveIssueSubscribed', () => { - const state = { issues: { [mockActiveIssue.id]: mockActiveIssue } }; + const state = { boardItems: { [mockActiveIssue.id]: mockActiveIssue } }; const getters = { activeIssue: mockActiveIssue }; const subscribedState = true; const input = { @@ -1026,7 +1052,7 @@ describe('setActiveIssueSubscribed', () => { }); describe('setActiveIssueMilestone', () => { - const state = { issues: { [mockIssue.id]: mockIssue } }; + const state = { boardItems: { [mockIssue.id]: mockIssue } }; const getters = { activeIssue: mockIssue }; const testMilestone = { ...mockMilestone, @@ -1080,7 +1106,7 @@ describe('setActiveIssueMilestone', () => { }); 
describe('setActiveIssueTitle', () => { - const state = { issues: { [mockIssue.id]: mockIssue } }; + const state = { boardItems: { [mockIssue.id]: mockIssue } }; const getters = { activeIssue: mockIssue }; const testTitle = 'Test Title'; const input = { @@ -1220,6 +1246,7 @@ describe('setSelectedProject', () => { describe('toggleBoardItemMultiSelection', () => { const boardItem = mockIssue; + const boardItem2 = mockIssue2; it('should commit mutation ADD_BOARD_ITEM_TO_SELECTION if item is not on selection state', () => { testAction( @@ -1250,6 +1277,66 @@ describe('toggleBoardItemMultiSelection', () => { [], ); }); + + it('should additionally commit mutation ADD_BOARD_ITEM_TO_SELECTION for active issue and dispatch unsetActiveId', () => { + testAction( + actions.toggleBoardItemMultiSelection, + boardItem2, + { activeId: mockActiveIssue.id, activeIssue: mockActiveIssue, selectedBoardItems: [] }, + [ + { + type: types.ADD_BOARD_ITEM_TO_SELECTION, + payload: mockActiveIssue, + }, + { + type: types.ADD_BOARD_ITEM_TO_SELECTION, + payload: boardItem2, + }, + ], + [{ type: 'unsetActiveId' }], + ); + }); +}); + +describe('resetBoardItemMultiSelection', () => { + it('should commit mutation RESET_BOARD_ITEM_SELECTION', () => { + testAction({ + action: actions.resetBoardItemMultiSelection, + state: { selectedBoardItems: [mockIssue] }, + expectedMutations: [ + { + type: types.RESET_BOARD_ITEM_SELECTION, + }, + ], + }); + }); +}); + +describe('toggleBoardItem', () => { + it('should dispatch resetBoardItemMultiSelection and unsetActiveId when boardItem is the active item', () => { + testAction({ + action: actions.toggleBoardItem, + payload: { boardItem: mockIssue }, + state: { + activeId: mockIssue.id, + }, + expectedActions: [{ type: 'resetBoardItemMultiSelection' }, { type: 'unsetActiveId' }], + }); + }); + + it('should dispatch resetBoardItemMultiSelection and setActiveId when boardItem is not the active item', () => { + testAction({ + action: actions.toggleBoardItem, + 
payload: { boardItem: mockIssue }, + state: { + activeId: inactiveId, + }, + expectedActions: [ + { type: 'resetBoardItemMultiSelection' }, + { type: 'setActiveId', payload: { id: mockIssue.id, sidebarType: ISSUABLE } }, + ], + }); + }); }); describe('fetchBacklog', () => { diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js index d5a19bf613f..d030b34ef80 100644 --- a/spec/frontend/boards/stores/getters_spec.js +++ b/spec/frontend/boards/stores/getters_spec.js @@ -38,15 +38,15 @@ describe('Boards - Getters', () => { }); }); - describe('getIssueById', () => { - const state = { issues: { 1: 'issue' } }; + describe('getBoardItemById', () => { + const state = { boardItems: { 1: 'issue' } }; it.each` id | expected ${'1'} | ${'issue'} ${''} | ${{}} `('returns $expected when $id is passed to state', ({ id, expected }) => { - expect(getters.getIssueById(state)(id)).toEqual(expected); + expect(getters.getBoardItemById(state)(id)).toEqual(expected); }); }); @@ -56,7 +56,7 @@ describe('Boards - Getters', () => { ${'1'} | ${'issue'} ${''} | ${{}} `('returns $expected when $id is passed to state', ({ id, expected }) => { - const state = { issues: { 1: 'issue' }, activeId: id }; + const state = { boardItems: { 1: 'issue' }, activeId: id }; expect(getters.activeIssue(state)).toEqual(expected); }); @@ -94,17 +94,18 @@ describe('Boards - Getters', () => { }); }); - describe('getIssuesByList', () => { + describe('getBoardItemsByList', () => { const boardsState = { - issuesByListId: mockIssuesByListId, - issues, + boardItemsByListId: mockIssuesByListId, + boardItems: issues, }; it('returns issues for a given listId', () => { - const getIssueById = (issueId) => [mockIssue, mockIssue2].find(({ id }) => id === issueId); + const getBoardItemById = (issueId) => + [mockIssue, mockIssue2].find(({ id }) => id === issueId); - expect(getters.getIssuesByList(boardsState, { getIssueById })('gid://gitlab/List/2')).toEqual( - mockIssues, - ); + 
expect( + getters.getBoardItemsByList(boardsState, { getBoardItemById })('gid://gitlab/List/2'), + ).toEqual(mockIssues); }); }); diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js index 9423f2ed583..2d7b80a997a 100644 --- a/spec/frontend/boards/stores/mutations_spec.js +++ b/spec/frontend/boards/stores/mutations_spec.js @@ -37,6 +37,7 @@ describe('Board Store Mutations', () => { const boardConfig = { milestoneTitle: 'Milestone 1', }; + const isEpicBoard = true; mutations[types.SET_INITIAL_BOARD_DATA](state, { boardId, @@ -44,6 +45,7 @@ describe('Board Store Mutations', () => { boardType, disabled, boardConfig, + isEpicBoard, }); expect(state.boardId).toEqual(boardId); @@ -51,6 +53,7 @@ describe('Board Store Mutations', () => { expect(state.boardType).toEqual(boardType); expect(state.disabled).toEqual(disabled); expect(state.boardConfig).toEqual(boardConfig); + expect(state.isEpicBoard).toEqual(isEpicBoard); }); }); @@ -219,24 +222,24 @@ describe('Board Store Mutations', () => { }); describe('RESET_ISSUES', () => { - it('should remove issues from issuesByListId state', () => { - const issuesByListId = { + it('should remove issues from boardItemsByListId state', () => { + const boardItemsByListId = { 'gid://gitlab/List/1': [mockIssue.id], }; state = { ...state, - issuesByListId, + boardItemsByListId, }; mutations[types.RESET_ISSUES](state); - expect(state.issuesByListId).toEqual({ 'gid://gitlab/List/1': [] }); + expect(state.boardItemsByListId).toEqual({ 'gid://gitlab/List/1': [] }); }); }); - describe('RECEIVE_ISSUES_FOR_LIST_SUCCESS', () => { - it('updates issuesByListId and issues on state', () => { + describe('RECEIVE_ITEMS_FOR_LIST_SUCCESS', () => { + it('updates boardItemsByListId and issues on state', () => { const listIssues = { 'gid://gitlab/List/1': [mockIssue.id], }; @@ -246,10 +249,10 @@ describe('Board Store Mutations', () => { state = { ...state, - issuesByListId: { + boardItemsByListId: { 
'gid://gitlab/List/1': [], }, - issues: {}, + boardItems: {}, boardLists: initialBoardListsState, }; @@ -260,18 +263,18 @@ describe('Board Store Mutations', () => { }, }; - mutations.RECEIVE_ISSUES_FOR_LIST_SUCCESS(state, { - listIssues: { listData: listIssues, issues }, + mutations.RECEIVE_ITEMS_FOR_LIST_SUCCESS(state, { + listItems: { listData: listIssues, boardItems: issues }, listPageInfo, listId: 'gid://gitlab/List/1', }); - expect(state.issuesByListId).toEqual(listIssues); - expect(state.issues).toEqual(issues); + expect(state.boardItemsByListId).toEqual(listIssues); + expect(state.boardItems).toEqual(issues); }); }); - describe('RECEIVE_ISSUES_FOR_LIST_FAILURE', () => { + describe('RECEIVE_ITEMS_FOR_LIST_FAILURE', () => { it('sets error message', () => { state = { ...state, @@ -281,7 +284,7 @@ describe('Board Store Mutations', () => { const listId = 'gid://gitlab/List/1'; - mutations.RECEIVE_ISSUES_FOR_LIST_FAILURE(state, listId); + mutations.RECEIVE_ITEMS_FOR_LIST_FAILURE(state, listId); expect(state.error).toEqual( 'An error occurred while fetching the board issues. 
Please reload the page.', @@ -303,7 +306,7 @@ describe('Board Store Mutations', () => { state = { ...state, error: undefined, - issues: { + boardItems: { ...issue, }, }; @@ -317,7 +320,7 @@ describe('Board Store Mutations', () => { value, }); - expect(state.issues[issueId]).toEqual({ ...issue[issueId], id: '2' }); + expect(state.boardItems[issueId]).toEqual({ ...issue[issueId], id: '2' }); }); }); @@ -343,7 +346,7 @@ describe('Board Store Mutations', () => { }); describe('MOVE_ISSUE', () => { - it('updates issuesByListId, moving issue between lists', () => { + it('updates boardItemsByListId, moving issue between lists', () => { const listIssues = { 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id], 'gid://gitlab/List/2': [], @@ -356,9 +359,9 @@ describe('Board Store Mutations', () => { state = { ...state, - issuesByListId: listIssues, + boardItemsByListId: listIssues, boardLists: initialBoardListsState, - issues, + boardItems: issues, }; mutations.MOVE_ISSUE(state, { @@ -372,7 +375,7 @@ describe('Board Store Mutations', () => { 'gid://gitlab/List/2': [mockIssue2.id], }; - expect(state.issuesByListId).toEqual(updatedListIssues); + expect(state.boardItemsByListId).toEqual(updatedListIssues); }); }); @@ -384,19 +387,19 @@ describe('Board Store Mutations', () => { state = { ...state, - issues, + boardItems: issues, }; mutations.MOVE_ISSUE_SUCCESS(state, { issue: rawIssue, }); - expect(state.issues).toEqual({ 436: { ...mockIssue, id: 436 } }); + expect(state.boardItems).toEqual({ 436: { ...mockIssue, id: 436 } }); }); }); describe('MOVE_ISSUE_FAILURE', () => { - it('updates issuesByListId, reverting moving issue between lists, and sets error message', () => { + it('updates boardItemsByListId, reverting moving issue between lists, and sets error message', () => { const listIssues = { 'gid://gitlab/List/1': [mockIssue.id], 'gid://gitlab/List/2': [mockIssue2.id], @@ -404,7 +407,7 @@ describe('Board Store Mutations', () => { state = { ...state, - issuesByListId: 
listIssues, + boardItemsByListId: listIssues, boardLists: initialBoardListsState, }; @@ -420,7 +423,7 @@ describe('Board Store Mutations', () => { 'gid://gitlab/List/2': [], }; - expect(state.issuesByListId).toEqual(updatedListIssues); + expect(state.boardItemsByListId).toEqual(updatedListIssues); expect(state.error).toEqual('An error occurred while moving the issue. Please try again.'); }); }); @@ -446,7 +449,7 @@ describe('Board Store Mutations', () => { }); describe('ADD_ISSUE_TO_LIST', () => { - it('adds issue to issues state and issue id in list in issuesByListId', () => { + it('adds issue to issues state and issue id in list in boardItemsByListId', () => { const listIssues = { 'gid://gitlab/List/1': [mockIssue.id], }; @@ -456,8 +459,8 @@ describe('Board Store Mutations', () => { state = { ...state, - issuesByListId: listIssues, - issues, + boardItemsByListId: listIssues, + boardItems: issues, boardLists: initialBoardListsState, }; @@ -465,14 +468,14 @@ describe('Board Store Mutations', () => { mutations.ADD_ISSUE_TO_LIST(state, { list: mockLists[0], issue: mockIssue2 }); - expect(state.issuesByListId['gid://gitlab/List/1']).toContain(mockIssue2.id); - expect(state.issues[mockIssue2.id]).toEqual(mockIssue2); + expect(state.boardItemsByListId['gid://gitlab/List/1']).toContain(mockIssue2.id); + expect(state.boardItems[mockIssue2.id]).toEqual(mockIssue2); expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(2); }); }); describe('ADD_ISSUE_TO_LIST_FAILURE', () => { - it('removes issue id from list in issuesByListId and sets error message', () => { + it('removes issue id from list in boardItemsByListId and sets error message', () => { const listIssues = { 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id], }; @@ -483,20 +486,20 @@ describe('Board Store Mutations', () => { state = { ...state, - issuesByListId: listIssues, - issues, + boardItemsByListId: listIssues, + boardItems: issues, boardLists: initialBoardListsState, }; 
mutations.ADD_ISSUE_TO_LIST_FAILURE(state, { list: mockLists[0], issueId: mockIssue2.id }); - expect(state.issuesByListId['gid://gitlab/List/1']).not.toContain(mockIssue2.id); + expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue2.id); expect(state.error).toBe('An error occurred while creating the issue. Please try again.'); }); }); describe('REMOVE_ISSUE_FROM_LIST', () => { - it('removes issue id from list in issuesByListId and deletes issue from state', () => { + it('removes issue id from list in boardItemsByListId and deletes issue from state', () => { const listIssues = { 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id], }; @@ -507,15 +510,15 @@ describe('Board Store Mutations', () => { state = { ...state, - issuesByListId: listIssues, - issues, + boardItemsByListId: listIssues, + boardItems: issues, boardLists: initialBoardListsState, }; mutations.ADD_ISSUE_TO_LIST_FAILURE(state, { list: mockLists[0], issueId: mockIssue2.id }); - expect(state.issuesByListId['gid://gitlab/List/1']).not.toContain(mockIssue2.id); - expect(state.issues).not.toContain(mockIssue2); + expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue2.id); + expect(state.boardItems).not.toContain(mockIssue2); }); }); @@ -607,14 +610,21 @@ describe('Board Store Mutations', () => { describe('REMOVE_BOARD_ITEM_FROM_SELECTION', () => { it('Should remove boardItem to selectedBoardItems state', () => { - state = { - ...state, - selectedBoardItems: [mockIssue], - }; + state.selectedBoardItems = [mockIssue]; mutations[types.REMOVE_BOARD_ITEM_FROM_SELECTION](state, mockIssue); expect(state.selectedBoardItems).toEqual([]); }); }); + + describe('RESET_BOARD_ITEM_SELECTION', () => { + it('Should reset selectedBoardItems state', () => { + state.selectedBoardItems = [mockIssue]; + + mutations[types.RESET_BOARD_ITEM_SELECTION](state, mockIssue); + + expect(state.selectedBoardItems).toEqual([]); + }); + }); }); diff --git 
a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js index f398d7a0965..941a3adb625 100644 --- a/spec/frontend/clusters_list/components/clusters_spec.js +++ b/spec/frontend/clusters_list/components/clusters_spec.js @@ -4,12 +4,12 @@ import { GlDeprecatedSkeletonLoading as GlSkeletonLoading, GlTable, } from '@gitlab/ui'; +import * as Sentry from '@sentry/browser'; import { mount } from '@vue/test-utils'; import MockAdapter from 'axios-mock-adapter'; import Clusters from '~/clusters_list/components/clusters.vue'; import ClusterStore from '~/clusters_list/store'; import axios from '~/lib/utils/axios_utils'; -import * as Sentry from '~/sentry/wrapper'; import { apiData } from '../mock_data'; describe('Clusters', () => { diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js index 00b998166aa..b2ef3c2138a 100644 --- a/spec/frontend/clusters_list/store/actions_spec.js +++ b/spec/frontend/clusters_list/store/actions_spec.js @@ -1,3 +1,4 @@ +import * as Sentry from '@sentry/browser'; import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -7,7 +8,6 @@ import * as types from '~/clusters_list/store/mutation_types'; import { deprecatedCreateFlash as flashError } from '~/flash'; import axios from '~/lib/utils/axios_utils'; import Poll from '~/lib/utils/poll'; -import * as Sentry from '~/sentry/wrapper'; import { apiData } from '../mock_data'; jest.mock('~/flash.js'); diff --git a/spec/frontend/commit/pipelines/pipelines_spec.js b/spec/frontend/commit/pipelines/pipelines_spec.js index f8bdd00f5da..3d6debda520 100644 --- a/spec/frontend/commit/pipelines/pipelines_spec.js +++ b/spec/frontend/commit/pipelines/pipelines_spec.js @@ -13,10 +13,8 @@ describe('Pipelines table in Commits and Merge requests', () => { let vm; const props = { endpoint: 
'endpoint.json', - helpPagePath: 'foo', emptyStateSvgPath: 'foo', errorStateSvgPath: 'foo', - autoDevopsHelpPath: 'foo', }; preloadFixtures(jsonFixtureName); diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js index 92e188f4bcc..efadb9b717d 100644 --- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js +++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js @@ -93,7 +93,7 @@ describe('Design discussions component', () => { }); it('does not render a checkbox in reply form', () => { - findReplyPlaceholder().vm.$emit('onClick'); + findReplyPlaceholder().vm.$emit('focus'); return wrapper.vm.$nextTick().then(() => { expect(findResolveCheckbox().exists()).toBe(false); @@ -124,7 +124,7 @@ describe('Design discussions component', () => { }); it('renders a checkbox with Resolve thread text in reply form', () => { - findReplyPlaceholder().vm.$emit('onClick'); + findReplyPlaceholder().vm.$emit('focus'); wrapper.setProps({ discussionWithOpenForm: defaultMockDiscussion.id }); return wrapper.vm.$nextTick().then(() => { @@ -193,7 +193,7 @@ describe('Design discussions component', () => { }); it('renders a checkbox with Unresolve thread text in reply form', () => { - findReplyPlaceholder().vm.$emit('onClick'); + findReplyPlaceholder().vm.$emit('focus'); wrapper.setProps({ discussionWithOpenForm: defaultMockDiscussion.id }); return wrapper.vm.$nextTick().then(() => { @@ -205,7 +205,7 @@ describe('Design discussions component', () => { it('hides reply placeholder and opens form on placeholder click', () => { createComponent(); - findReplyPlaceholder().vm.$emit('onClick'); + findReplyPlaceholder().vm.$emit('focus'); wrapper.setProps({ discussionWithOpenForm: defaultMockDiscussion.id }); return wrapper.vm.$nextTick().then(() => { @@ -307,7 +307,7 @@ describe('Design discussions component', () => { 
it('emits openForm event on opening the form', () => { createComponent(); - findReplyPlaceholder().vm.$emit('onClick'); + findReplyPlaceholder().vm.$emit('focus'); expect(wrapper.emitted('open-form')).toBeTruthy(); }); diff --git a/spec/frontend/design_management/components/list/item_spec.js b/spec/frontend/design_management/components/list/item_spec.js index caf0f8bb5bc..58636ece91e 100644 --- a/spec/frontend/design_management/components/list/item_spec.js +++ b/spec/frontend/design_management/components/list/item_spec.js @@ -8,7 +8,7 @@ const localVue = createLocalVue(); localVue.use(VueRouter); const router = new VueRouter(); -// Referenced from: doc/api/graphql/reference/gitlab_schema.graphql:DesignVersionEvent +// Referenced from: gitlab_schema.graphql:DesignVersionEvent const DESIGN_VERSION_EVENT = { CREATION: 'CREATION', DELETION: 'DELETION', diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js index dcb58f7a380..6af38590610 100644 --- a/spec/frontend/diffs/store/utils_spec.js +++ b/spec/frontend/diffs/store/utils_spec.js @@ -275,24 +275,28 @@ describe('DiffsStoreUtils', () => { describe('trimFirstCharOfLineContent', () => { it('trims the line when it starts with a space', () => { + // eslint-disable-next-line import/no-deprecated expect(utils.trimFirstCharOfLineContent({ rich_text: ' diff' })).toEqual({ rich_text: 'diff', }); }); it('trims the line when it starts with a +', () => { + // eslint-disable-next-line import/no-deprecated expect(utils.trimFirstCharOfLineContent({ rich_text: '+diff' })).toEqual({ rich_text: 'diff', }); }); it('trims the line when it starts with a -', () => { + // eslint-disable-next-line import/no-deprecated expect(utils.trimFirstCharOfLineContent({ rich_text: '-diff' })).toEqual({ rich_text: 'diff', }); }); it('does not trims the line when it starts with a letter', () => { + // eslint-disable-next-line import/no-deprecated expect(utils.trimFirstCharOfLineContent({ rich_text: 'diff' 
})).toEqual({ rich_text: 'diff', }); @@ -303,12 +307,14 @@ describe('DiffsStoreUtils', () => { rich_text: ' diff', }; + // eslint-disable-next-line import/no-deprecated utils.trimFirstCharOfLineContent(lineObj); expect(lineObj).toEqual({ rich_text: ' diff' }); }); it('handles a undefined or null parameter', () => { + // eslint-disable-next-line import/no-deprecated expect(utils.trimFirstCharOfLineContent()).toEqual({}); }); }); diff --git a/spec/frontend/experiment_tracking_spec.js b/spec/frontend/experiment_tracking_spec.js new file mode 100644 index 00000000000..ac05419cecd --- /dev/null +++ b/spec/frontend/experiment_tracking_spec.js @@ -0,0 +1,77 @@ +import ExperimentTracking from '~/experiment_tracking'; +import Tracking from '~/tracking'; + +jest.mock('~/tracking'); + +const oldGon = window.gon; + +let experimentTracking; +let label; +let newGon = {}; + +const setup = () => { + window.gon = newGon; + experimentTracking = new ExperimentTracking('sidebar_experiment', label); +}; + +beforeEach(() => { + document.body.dataset.page = 'issues-page'; +}); + +afterEach(() => { + window.gon = oldGon; + Tracking.mockClear(); + label = undefined; +}); + +describe('event', () => { + describe('when experiment data exists for experimentName', () => { + beforeEach(() => { + newGon = { global: { experiment: { sidebar_experiment: 'experiment-data' } } }; + setup(); + }); + + describe('when providing options', () => { + label = { label: 'sidebar-drawer' }; + + it('passes them to the tracking call', () => { + experimentTracking.event('click_sidebar_close'); + + expect(Tracking.event).toHaveBeenCalledTimes(1); + expect(Tracking.event).toHaveBeenCalledWith('issues-page', 'click_sidebar_close', { + label: 'sidebar-drawer', + context: { + schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0', + data: 'experiment-data', + }, + }); + }); + }); + + it('tracks with the correct context', () => { + experimentTracking.event('click_sidebar_trigger'); + + 
expect(Tracking.event).toHaveBeenCalledTimes(1); + expect(Tracking.event).toHaveBeenCalledWith('issues-page', 'click_sidebar_trigger', { + label: undefined, + context: { + schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0', + data: 'experiment-data', + }, + }); + }); + }); + + describe('when experiment data does NOT exists for the experimentName', () => { + beforeEach(() => { + newGon = { global: { experiment: { unrelated_experiment: 'not happening' } } }; + setup(); + }); + + it('does not track', () => { + experimentTracking.event('click_sidebar_close'); + + expect(Tracking.event).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js index e2717b98ea9..2fd8e524e7a 100644 --- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js +++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js @@ -150,5 +150,12 @@ describe('Edit feature flag form', () => { label: 'feature_flag_toggle', }); }); + + it('should render the toggle with a visually hidden label', () => { + expect(wrapper.find(GlToggle).props()).toMatchObject({ + label: 'Feature flag status', + labelPosition: 'hidden', + }); + }); }); }); diff --git a/spec/frontend/fixtures/pipelines.rb b/spec/frontend/fixtures/pipelines.rb index b4b7f0e332f..2a538352abe 100644 --- a/spec/frontend/fixtures/pipelines.rb +++ b/spec/frontend/fixtures/pipelines.rb @@ -5,16 +5,22 @@ require 'spec_helper' RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :controller do include JavaScriptFixturesHelpers - let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} - let(:project) { create(:project, :repository, namespace: namespace, path: 'pipelines-project') } - let(:commit) { create(:commit, project: project) } - let(:commit_without_author) { RepoHelpers.another_sample_commit } - let!(:user) { create(:user, developer_projects: [project], 
email: commit.author_email) } - let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id, user: user) } + let_it_be(:namespace) { create(:namespace, name: 'frontend-fixtures' )} + let_it_be(:project) { create(:project, :repository, namespace: namespace, path: 'pipelines-project') } + + let_it_be(:commit_without_author) { RepoHelpers.another_sample_commit } let!(:pipeline_without_author) { create(:ci_pipeline, project: project, sha: commit_without_author.id) } - let!(:pipeline_without_commit) { create(:ci_pipeline, status: :success, project: project, sha: '0000') } + let!(:build_pipeline_without_author) { create(:ci_build, pipeline: pipeline_without_author, stage: 'test') } - render_views + let_it_be(:pipeline_without_commit) { create(:ci_pipeline, status: :success, project: project, sha: '0000') } + let!(:build_pipeline_without_commit) { create(:ci_build, pipeline: pipeline_without_commit, stage: 'test') } + + let(:commit) { create(:commit, project: project) } + let(:user) { create(:user, developer_projects: [project], email: commit.author_email) } + let!(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project, sha: commit.id, user: user) } + let!(:build_success) { create(:ci_build, pipeline: pipeline, stage: 'build') } + let!(:build_test) { create(:ci_build, pipeline: pipeline, stage: 'test') } + let!(:build_deploy_failed) { create(:ci_build, status: :failed, pipeline: pipeline, stage: 'deploy') } before(:all) do clean_frontend_fixtures('pipelines/') @@ -32,4 +38,14 @@ RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :co expect(response).to be_successful end + + it "pipelines/test_report.json" do + get :test_report, params: { + namespace_id: namespace, + project_id: project, + id: pipeline.id + }, format: :json + + expect(response).to be_successful + end end diff --git a/spec/frontend/fixtures/test_report.rb b/spec/frontend/fixtures/test_report.rb deleted file mode 100644 index 3d09078ba68..00000000000 --- 
a/spec/frontend/fixtures/test_report.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require "spec_helper" - -RSpec.describe Projects::PipelinesController, "(JavaScript fixtures)", type: :controller do - include JavaScriptFixturesHelpers - - let(:namespace) { create(:namespace, name: "frontend-fixtures") } - let(:project) { create(:project, :repository, namespace: namespace, path: "pipelines-project") } - let(:commit) { create(:commit, project: project) } - let(:user) { create(:user, developer_projects: [project], email: commit.author_email) } - let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project, user: user) } - - render_views - - before do - sign_in(user) - end - - it "pipelines/test_report.json" do - get :test_report, params: { - namespace_id: project.namespace, - project_id: project, - id: pipeline.id - }, format: :json - - expect(response).to be_successful - end -end diff --git a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap index efa58a4a47b..194a619c4aa 100644 --- a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap +++ b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap @@ -10,7 +10,6 @@ exports[`IDE pipelines list when loaded renders empty state when no latestPipeli cansetci="true" class="mb-auto mt-auto" emptystatesvgpath="http://test.host" - helppagepath="http://test.host" /> </div> `; diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js index 58d8c0629fb..a917f4c0230 100644 --- a/spec/frontend/ide/components/pipelines/list_spec.js +++ b/spec/frontend/ide/components/pipelines/list_spec.js @@ -19,7 +19,6 @@ describe('IDE pipelines list', () => { let wrapper; const defaultState = { - links: { ciHelpPagePath: TEST_HOST }, pipelinesEmptyStateSvgPath: TEST_HOST, }; const defaultPipelinesState = { diff --git 
a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js index 1985feb1615..a3b327343e5 100644 --- a/spec/frontend/ide/components/repo_editor_spec.js +++ b/spec/frontend/ide/components/repo_editor_spec.js @@ -1,11 +1,15 @@ +import { shallowMount } from '@vue/test-utils'; import MockAdapter from 'axios-mock-adapter'; -import { Range } from 'monaco-editor'; +import { editor as monacoEditor, Range } from 'monaco-editor'; import Vue from 'vue'; import Vuex from 'vuex'; import '~/behaviors/markdown/render_gfm'; -import { createComponentWithStore } from 'helpers/vue_mount_component_helper'; import waitForPromises from 'helpers/wait_for_promises'; import waitUsingRealTimer from 'helpers/wait_using_real_timer'; +import { exampleConfigs, exampleFiles } from 'jest/ide/lib/editorconfig/mock_data'; +import { EDITOR_CODE_INSTANCE_FN, EDITOR_DIFF_INSTANCE_FN } from '~/editor/constants'; +import EditorLite from '~/editor/editor_lite'; +import { EditorWebIdeExtension } from '~/editor/extensions/editor_lite_webide_ext'; import RepoEditor from '~/ide/components/repo_editor.vue'; import { leftSidebarViews, @@ -13,733 +17,723 @@ import { FILE_VIEW_MODE_PREVIEW, viewerTypes, } from '~/ide/constants'; -import Editor from '~/ide/lib/editor'; +import ModelManager from '~/ide/lib/common/model_manager'; import service from '~/ide/services'; import { createStoreOptions } from '~/ide/stores'; import axios from '~/lib/utils/axios_utils'; +import ContentViewer from '~/vue_shared/components/content_viewer/content_viewer.vue'; import { file } from '../helpers'; -import { exampleConfigs, exampleFiles } from '../lib/editorconfig/mock_data'; + +const defaultFileProps = { + ...file('file.txt'), + content: 'hello world', + active: true, + tempFile: true, +}; +const createActiveFile = (props) => { + return { + ...defaultFileProps, + ...props, + }; +}; + +const dummyFile = { + markdown: (() => + createActiveFile({ + projectId: 'namespace/project', + path: 
'sample.md', + name: 'sample.md', + }))(), + binary: (() => + createActiveFile({ + name: 'file.dat', + content: '🐱', // non-ascii binary content, + }))(), + empty: (() => + createActiveFile({ + tempFile: false, + content: '', + raw: '', + }))(), +}; + +const prepareStore = (state, activeFile) => { + const localState = { + openFiles: [activeFile], + projects: { + 'gitlab-org/gitlab': { + branches: { + master: { + name: 'master', + commit: { + id: 'abcdefgh', + }, + }, + }, + }, + }, + currentProjectId: 'gitlab-org/gitlab', + currentBranchId: 'master', + entries: { + [activeFile.path]: activeFile, + }, + }; + const storeOptions = createStoreOptions(); + return new Vuex.Store({ + ...createStoreOptions(), + state: { + ...storeOptions.state, + ...localState, + ...state, + }, + }); +}; describe('RepoEditor', () => { + let wrapper; let vm; - let store; + let createInstanceSpy; + let createDiffInstanceSpy; + let createModelSpy; const waitForEditorSetup = () => new Promise((resolve) => { vm.$once('editorSetup', resolve); }); - const createComponent = () => { - if (vm) { - throw new Error('vm already exists'); - } - vm = createComponentWithStore(Vue.extend(RepoEditor), store, { - file: store.state.openFiles[0], + const createComponent = async ({ state = {}, activeFile = defaultFileProps } = {}) => { + const store = prepareStore(state, activeFile); + wrapper = shallowMount(RepoEditor, { + store, + propsData: { + file: store.state.openFiles[0], + }, + mocks: { + ContentViewer, + }, }); - + await waitForPromises(); + vm = wrapper.vm; jest.spyOn(vm, 'getFileData').mockResolvedValue(); jest.spyOn(vm, 'getRawFileData').mockResolvedValue(); - - vm.$mount(); }; - const createOpenFile = (path) => { - const origFile = store.state.openFiles[0]; - const newFile = { ...origFile, path, key: path, name: 'myfile.txt', content: 'hello world' }; - - store.state.entries[path] = newFile; - - store.state.openFiles = [newFile]; - }; + const findEditor = () => 
wrapper.find('[data-testid="editor-container"]'); + const findTabs = () => wrapper.findAll('.ide-mode-tabs .nav-links li'); + const findPreviewTab = () => wrapper.find('[data-testid="preview-tab"]'); beforeEach(() => { - const f = { - ...file('file.txt'), - content: 'hello world', - }; - - const storeOptions = createStoreOptions(); - store = new Vuex.Store(storeOptions); - - f.active = true; - f.tempFile = true; - - store.state.openFiles.push(f); - store.state.projects = { - 'gitlab-org/gitlab': { - branches: { - master: { - name: 'master', - commit: { - id: 'abcdefgh', - }, - }, - }, - }, - }; - store.state.currentProjectId = 'gitlab-org/gitlab'; - store.state.currentBranchId = 'master'; - - Vue.set(store.state.entries, f.path, f); + createInstanceSpy = jest.spyOn(EditorLite.prototype, EDITOR_CODE_INSTANCE_FN); + createDiffInstanceSpy = jest.spyOn(EditorLite.prototype, EDITOR_DIFF_INSTANCE_FN); + createModelSpy = jest.spyOn(monacoEditor, 'createModel'); + jest.spyOn(service, 'getFileData').mockResolvedValue(); + jest.spyOn(service, 'getRawFileData').mockResolvedValue(); }); afterEach(() => { - vm.$destroy(); - vm = null; - - Editor.editorInstance.dispose(); + jest.clearAllMocks(); + // create a new model each time, otherwise tests conflict with each other + // because of same model being used in multiple tests + // eslint-disable-next-line no-undef + monaco.editor.getModels().forEach((model) => model.dispose()); + wrapper.destroy(); + wrapper = null; }); - const findEditor = () => vm.$el.querySelector('.multi-file-editor-holder'); - const changeViewMode = (viewMode) => - store.dispatch('editor/updateFileEditor', { path: vm.file.path, data: { viewMode } }); - describe('default', () => { - beforeEach(() => { - createComponent(); - - return waitForEditorSetup(); + it.each` + boolVal | textVal + ${true} | ${'all'} + ${false} | ${'none'} + `('sets renderWhitespace to "$textVal"', async ({ boolVal, textVal } = {}) => { + await createComponent({ + state: { + 
renderWhitespaceInCode: boolVal, + }, + }); + expect(vm.editorOptions.renderWhitespace).toEqual(textVal); }); - it('sets renderWhitespace to `all`', () => { - vm.$store.state.renderWhitespaceInCode = true; - - expect(vm.editorOptions.renderWhitespace).toEqual('all'); + it('renders an ide container', async () => { + await createComponent(); + expect(findEditor().isVisible()).toBe(true); }); - it('sets renderWhitespace to `none`', () => { - vm.$store.state.renderWhitespaceInCode = false; + it('renders only an edit tab', async () => { + await createComponent(); + const tabs = findTabs(); - expect(vm.editorOptions.renderWhitespace).toEqual('none'); + expect(tabs).toHaveLength(1); + expect(tabs.at(0).text()).toBe('Edit'); }); + }); - it('renders an ide container', () => { - expect(vm.shouldHideEditor).toBeFalsy(); - expect(vm.showEditor).toBe(true); - expect(findEditor()).not.toHaveCss({ display: 'none' }); - }); + describe('when file is markdown', () => { + let mock; + let activeFile; - it('renders only an edit tab', (done) => { - Vue.nextTick(() => { - const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); + beforeEach(() => { + activeFile = dummyFile.markdown; - expect(tabs.length).toBe(1); - expect(tabs[0].textContent.trim()).toBe('Edit'); + mock = new MockAdapter(axios); - done(); + mock.onPost(/(.*)\/preview_markdown/).reply(200, { + body: `<p>${defaultFileProps.content}</p>`, }); }); - describe('when file is markdown', () => { - let mock; - - beforeEach(() => { - mock = new MockAdapter(axios); - - mock.onPost(/(.*)\/preview_markdown/).reply(200, { - body: '<p>testing 123</p>', - }); - - Vue.set(vm, 'file', { - ...vm.file, - projectId: 'namespace/project', - path: 'sample.md', - name: 'sample.md', - content: 'testing 123', - }); - - vm.$store.state.entries[vm.file.path] = vm.file; + afterEach(() => { + mock.restore(); + }); - return vm.$nextTick(); - }); + it('renders an Edit and a Preview Tab', async () => { + await createComponent({ activeFile }); 
+ const tabs = findTabs(); - afterEach(() => { - mock.restore(); - }); + expect(tabs).toHaveLength(2); + expect(tabs.at(0).text()).toBe('Edit'); + expect(tabs.at(1).text()).toBe('Preview Markdown'); + }); - it('renders an Edit and a Preview Tab', (done) => { - Vue.nextTick(() => { - const tabs = vm.$el.querySelectorAll('.ide-mode-tabs .nav-links li'); + it('renders markdown for tempFile', async () => { + // by default files created in the spec are temp: no need for explicitly sending the param + await createComponent({ activeFile }); - expect(tabs.length).toBe(2); - expect(tabs[0].textContent.trim()).toBe('Edit'); - expect(tabs[1].textContent.trim()).toBe('Preview Markdown'); + findPreviewTab().trigger('click'); + await waitForPromises(); + expect(wrapper.find(ContentViewer).html()).toContain(defaultFileProps.content); + }); - done(); - }); + it('shows no tabs when not in Edit mode', async () => { + await createComponent({ + state: { + currentActivityView: leftSidebarViews.review.name, + }, + activeFile, }); + expect(findTabs()).toHaveLength(0); + }); + }); - it('renders markdown for tempFile', (done) => { - vm.file.tempFile = true; - - vm.$nextTick() - .then(() => { - vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')[1].click(); - }) - .then(waitForPromises) - .then(() => { - expect(vm.$el.querySelector('.preview-container').innerHTML).toContain( - '<p>testing 123</p>', - ); - }) - .then(done) - .catch(done.fail); - }); + describe('when file is binary and not raw', () => { + beforeEach(async () => { + const activeFile = dummyFile.binary; + await createComponent({ activeFile }); + }); - describe('when not in edit mode', () => { - beforeEach(async () => { - await vm.$nextTick(); + it('does not render the IDE', () => { + expect(findEditor().isVisible()).toBe(false); + }); - vm.$store.state.currentActivityView = leftSidebarViews.review.name; + it('does not create an instance', () => { + expect(createInstanceSpy).not.toHaveBeenCalled(); + 
expect(createDiffInstanceSpy).not.toHaveBeenCalled(); + }); + }); - return vm.$nextTick(); + describe('createEditorInstance', () => { + it.each` + viewer | diffInstance + ${viewerTypes.edit} | ${undefined} + ${viewerTypes.diff} | ${true} + ${viewerTypes.mr} | ${true} + `( + 'creates instance of correct type when viewer is $viewer', + async ({ viewer, diffInstance }) => { + await createComponent({ + state: { viewer }, }); + const isDiff = () => { + return diffInstance ? { isDiff: true } : {}; + }; + expect(createInstanceSpy).toHaveBeenCalledWith(expect.objectContaining(isDiff())); + expect(createDiffInstanceSpy).toHaveBeenCalledTimes((diffInstance && 1) || 0); + }, + ); - it('shows no tabs', () => { - expect(vm.$el.querySelectorAll('.ide-mode-tabs .nav-links a')).toHaveLength(0); + it('installs the WebIDE extension', async () => { + const extensionSpy = jest.spyOn(EditorLite, 'instanceApplyExtension'); + await createComponent(); + expect(extensionSpy).toHaveBeenCalled(); + Reflect.ownKeys(EditorWebIdeExtension.prototype) + .filter((fn) => fn !== 'constructor') + .forEach((fn) => { + expect(vm.editor[fn]).toBe(EditorWebIdeExtension.prototype[fn]); }); - }); }); + }); - describe('when open file is binary and not raw', () => { - beforeEach((done) => { - vm.file.name = 'file.dat'; - vm.file.content = '🐱'; // non-ascii binary content - jest.spyOn(vm.editor, 'createInstance').mockImplementation(); - jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); - - vm.$nextTick(done); - }); - - it('does not render the IDE', () => { - expect(vm.shouldHideEditor).toBeTruthy(); - }); - - it('does not call createInstance', async () => { - // Mirror the act's in the `createEditorInstance` - vm.createEditorInstance(); - - await vm.$nextTick(); + describe('setupEditor', () => { + beforeEach(async () => { + await createComponent(); + }); - expect(vm.editor.createInstance).not.toHaveBeenCalled(); - expect(vm.editor.createDiffInstance).not.toHaveBeenCalled(); - }); + it('creates 
new model on load', () => { + // We always create two models per file to be able to build a diff of changes + expect(createModelSpy).toHaveBeenCalledTimes(2); + // The model with the most recent changes is the last one + const [content] = createModelSpy.mock.calls[1]; + expect(content).toBe(defaultFileProps.content); }); - describe('createEditorInstance', () => { - it('calls createInstance when viewer is editor', (done) => { - jest.spyOn(vm.editor, 'createInstance').mockImplementation(); + it('does not create a new model on subsequent calls to setupEditor and re-uses the already-existing model', () => { + const existingModel = vm.model; + createModelSpy.mockClear(); - vm.createEditorInstance(); + vm.setupEditor(); - vm.$nextTick(() => { - expect(vm.editor.createInstance).toHaveBeenCalled(); + expect(createModelSpy).not.toHaveBeenCalled(); + expect(vm.model).toBe(existingModel); + }); - done(); - }); - }); + it('adds callback methods', () => { + jest.spyOn(vm.editor, 'onPositionChange'); + jest.spyOn(vm.model, 'onChange'); + jest.spyOn(vm.model, 'updateOptions'); - it('calls createDiffInstance when viewer is diff', (done) => { - vm.$store.state.viewer = 'diff'; + vm.setupEditor(); - jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); + expect(vm.editor.onPositionChange).toHaveBeenCalledTimes(1); + expect(vm.model.onChange).toHaveBeenCalledTimes(1); + expect(vm.model.updateOptions).toHaveBeenCalledWith(vm.rules); + }); - vm.createEditorInstance(); + it('updates state with the value of the model', () => { + const newContent = 'As Gregor Samsa\n awoke one morning\n'; + vm.model.setValue(newContent); - vm.$nextTick(() => { - expect(vm.editor.createDiffInstance).toHaveBeenCalled(); + vm.setupEditor(); - done(); - }); - }); + expect(vm.file.content).toBe(newContent); + }); - it('calls createDiffInstance when viewer is a merge request diff', (done) => { - vm.$store.state.viewer = 'mrdiff'; + it('sets head model as staged file', () => { + 
vm.modelManager.dispose(); + const addModelSpy = jest.spyOn(ModelManager.prototype, 'addModel'); - jest.spyOn(vm.editor, 'createDiffInstance').mockImplementation(); + vm.$store.state.stagedFiles.push({ ...vm.file, key: 'staged' }); + vm.file.staged = true; + vm.file.key = `unstaged-${vm.file.key}`; - vm.createEditorInstance(); + vm.setupEditor(); - vm.$nextTick(() => { - expect(vm.editor.createDiffInstance).toHaveBeenCalled(); + expect(addModelSpy).toHaveBeenCalledWith(vm.file, vm.$store.state.stagedFiles[0]); + }); + }); - done(); - }); - }); + describe('editor updateDimensions', () => { + let updateDimensionsSpy; + let updateDiffViewSpy; + beforeEach(async () => { + await createComponent(); + updateDimensionsSpy = jest.spyOn(vm.editor, 'updateDimensions'); + updateDiffViewSpy = jest.spyOn(vm.editor, 'updateDiffView').mockImplementation(); }); - describe('setupEditor', () => { - it('creates new model', () => { - jest.spyOn(vm.editor, 'createModel'); + it('calls updateDimensions only when panelResizing is false', async () => { + expect(updateDimensionsSpy).not.toHaveBeenCalled(); + expect(updateDiffViewSpy).not.toHaveBeenCalled(); + expect(vm.$store.state.panelResizing).toBe(false); // default value - Editor.editorInstance.modelManager.dispose(); + vm.$store.state.panelResizing = true; + await vm.$nextTick(); - vm.setupEditor(); + expect(updateDimensionsSpy).not.toHaveBeenCalled(); + expect(updateDiffViewSpy).not.toHaveBeenCalled(); - expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, null); - expect(vm.model).not.toBeNull(); - }); + vm.$store.state.panelResizing = false; + await vm.$nextTick(); - it('attaches model to editor', () => { - jest.spyOn(vm.editor, 'attachModel'); + expect(updateDimensionsSpy).toHaveBeenCalledTimes(1); + expect(updateDiffViewSpy).toHaveBeenCalledTimes(1); - Editor.editorInstance.modelManager.dispose(); + vm.$store.state.panelResizing = true; + await vm.$nextTick(); - vm.setupEditor(); + 
expect(updateDimensionsSpy).toHaveBeenCalledTimes(1); + expect(updateDiffViewSpy).toHaveBeenCalledTimes(1); + }); - expect(vm.editor.attachModel).toHaveBeenCalledWith(vm.model); - }); + it('calls updateDimensions when rightPane is toggled', async () => { + expect(updateDimensionsSpy).not.toHaveBeenCalled(); + expect(updateDiffViewSpy).not.toHaveBeenCalled(); + expect(vm.$store.state.rightPane.isOpen).toBe(false); // default value - it('attaches model to merge request editor', () => { - vm.$store.state.viewer = 'mrdiff'; - vm.file.mrChange = true; - jest.spyOn(vm.editor, 'attachMergeRequestModel').mockImplementation(); + vm.$store.state.rightPane.isOpen = true; + await vm.$nextTick(); - Editor.editorInstance.modelManager.dispose(); + expect(updateDimensionsSpy).toHaveBeenCalledTimes(1); + expect(updateDiffViewSpy).toHaveBeenCalledTimes(1); - vm.setupEditor(); + vm.$store.state.rightPane.isOpen = false; + await vm.$nextTick(); - expect(vm.editor.attachMergeRequestModel).toHaveBeenCalledWith(vm.model); - }); + expect(updateDimensionsSpy).toHaveBeenCalledTimes(2); + expect(updateDiffViewSpy).toHaveBeenCalledTimes(2); + }); + }); - it('does not attach model to merge request editor when not a MR change', () => { - vm.$store.state.viewer = 'mrdiff'; - vm.file.mrChange = false; - jest.spyOn(vm.editor, 'attachMergeRequestModel').mockImplementation(); + describe('editor tabs', () => { + beforeEach(async () => { + await createComponent(); + }); - Editor.editorInstance.modelManager.dispose(); + it.each` + mode | isVisible + ${'edit'} | ${true} + ${'review'} | ${false} + ${'commit'} | ${false} + `('tabs in $mode are $isVisible', async ({ mode, isVisible } = {}) => { + vm.$store.state.currentActivityView = leftSidebarViews[mode].name; - vm.setupEditor(); + await vm.$nextTick(); + expect(wrapper.find('.nav-links').exists()).toBe(isVisible); + }); + }); - expect(vm.editor.attachMergeRequestModel).not.toHaveBeenCalledWith(vm.model); + describe('files in preview mode', () => { + let 
updateDimensionsSpy; + const changeViewMode = (viewMode) => + vm.$store.dispatch('editor/updateFileEditor', { + path: vm.file.path, + data: { viewMode }, }); - it('adds callback methods', () => { - jest.spyOn(vm.editor, 'onPositionChange'); - - Editor.editorInstance.modelManager.dispose(); - - vm.setupEditor(); - - expect(vm.editor.onPositionChange).toHaveBeenCalled(); - expect(vm.model.events.size).toBe(2); + beforeEach(async () => { + await createComponent({ + activeFile: dummyFile.markdown, }); - it('updates state with the value of the model', () => { - vm.model.setValue('testing 1234\n'); - - vm.setupEditor(); - - expect(vm.file.content).toBe('testing 1234\n'); - }); + updateDimensionsSpy = jest.spyOn(vm.editor, 'updateDimensions'); - it('sets head model as staged file', () => { - jest.spyOn(vm.editor, 'createModel'); + changeViewMode(FILE_VIEW_MODE_PREVIEW); + await vm.$nextTick(); + }); - Editor.editorInstance.modelManager.dispose(); + it('do not show the editor', () => { + expect(vm.showEditor).toBe(false); + expect(findEditor().isVisible()).toBe(false); + }); - vm.$store.state.stagedFiles.push({ ...vm.file, key: 'staged' }); - vm.file.staged = true; - vm.file.key = `unstaged-${vm.file.key}`; + it('updates dimensions when switching view back to edit', async () => { + expect(updateDimensionsSpy).not.toHaveBeenCalled(); - vm.setupEditor(); + changeViewMode(FILE_VIEW_MODE_EDITOR); + await vm.$nextTick(); - expect(vm.editor.createModel).toHaveBeenCalledWith(vm.file, vm.$store.state.stagedFiles[0]); - }); + expect(updateDimensionsSpy).toHaveBeenCalled(); }); + }); - describe('editor updateDimensions', () => { - beforeEach(() => { - jest.spyOn(vm.editor, 'updateDimensions'); - jest.spyOn(vm.editor, 'updateDiffView').mockImplementation(); - }); - - it('calls updateDimensions when panelResizing is false', (done) => { - vm.$store.state.panelResizing = true; - - vm.$nextTick() - .then(() => { - vm.$store.state.panelResizing = false; - }) - .then(vm.$nextTick) - 
.then(() => { - expect(vm.editor.updateDimensions).toHaveBeenCalled(); - expect(vm.editor.updateDiffView).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); - - it('does not call updateDimensions when panelResizing is true', (done) => { - vm.$store.state.panelResizing = true; + describe('initEditor', () => { + const hideEditorAndRunFn = async () => { + jest.clearAllMocks(); + jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); - vm.$nextTick(() => { - expect(vm.editor.updateDimensions).not.toHaveBeenCalled(); - expect(vm.editor.updateDiffView).not.toHaveBeenCalled(); + vm.initEditor(); + await vm.$nextTick(); + }; - done(); - }); + it('does not fetch file information for temp entries', async () => { + await createComponent({ + activeFile: createActiveFile(), }); - it('calls updateDimensions when rightPane is opened', (done) => { - vm.$store.state.rightPane.isOpen = true; - - vm.$nextTick(() => { - expect(vm.editor.updateDimensions).toHaveBeenCalled(); - expect(vm.editor.updateDiffView).toHaveBeenCalled(); - - done(); - }); - }); + expect(vm.getFileData).not.toHaveBeenCalled(); }); - describe('show tabs', () => { - it('shows tabs in edit mode', () => { - expect(vm.$el.querySelector('.nav-links')).not.toBe(null); + it('is being initialised for files without content even if shouldHideEditor is `true`', async () => { + await createComponent({ + activeFile: dummyFile.empty, }); - it('hides tabs in review mode', (done) => { - vm.$store.state.currentActivityView = leftSidebarViews.review.name; + await hideEditorAndRunFn(); - vm.$nextTick(() => { - expect(vm.$el.querySelector('.nav-links')).toBe(null); + expect(vm.getFileData).toHaveBeenCalled(); + expect(vm.getRawFileData).toHaveBeenCalled(); + }); - done(); - }); + it('does not initialize editor for files already with content when shouldHideEditor is `true`', async () => { + await createComponent({ + activeFile: createActiveFile(), }); - it('hides tabs in commit mode', (done) => { - 
vm.$store.state.currentActivityView = leftSidebarViews.commit.name; + await hideEditorAndRunFn(); - vm.$nextTick(() => { - expect(vm.$el.querySelector('.nav-links')).toBe(null); + expect(vm.getFileData).not.toHaveBeenCalled(); + expect(vm.getRawFileData).not.toHaveBeenCalled(); + expect(createInstanceSpy).not.toHaveBeenCalled(); + }); + }); - done(); - }); + describe('updates on file changes', () => { + beforeEach(async () => { + await createComponent({ + activeFile: createActiveFile({ + content: 'foo', // need to prevent full cycle of initEditor + }), }); + jest.spyOn(vm, 'initEditor').mockImplementation(); }); - describe('when files view mode is preview', () => { - beforeEach((done) => { - jest.spyOn(vm.editor, 'updateDimensions').mockImplementation(); - changeViewMode(FILE_VIEW_MODE_PREVIEW); - vm.file.name = 'myfile.md'; - vm.file.content = 'hello world'; + it('calls removePendingTab when old file is pending', async () => { + jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); + jest.spyOn(vm, 'removePendingTab').mockImplementation(); - vm.$nextTick(done); - }); + const origFile = vm.file; + vm.file.pending = true; + await vm.$nextTick(); - it('should hide editor', () => { - expect(vm.showEditor).toBe(false); - expect(findEditor()).toHaveCss({ display: 'none' }); + wrapper.setProps({ + file: file('testing'), }); + vm.file.content = 'foo'; // need to prevent full cycle of initEditor + await vm.$nextTick(); - describe('when file view mode changes to editor', () => { - it('should update dimensions', () => { - changeViewMode(FILE_VIEW_MODE_EDITOR); - - return vm.$nextTick().then(() => { - expect(vm.editor.updateDimensions).toHaveBeenCalled(); - }); - }); - }); + expect(vm.removePendingTab).toHaveBeenCalledWith(origFile); }); - describe('initEditor', () => { - beforeEach(() => { - vm.file.tempFile = false; - jest.spyOn(vm.editor, 'createInstance').mockImplementation(); - jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); - }); + it('does 
not call initEditor if the file did not change', async () => { + Vue.set(vm, 'file', vm.file); + await vm.$nextTick(); - it('does not fetch file information for temp entries', (done) => { - vm.file.tempFile = true; - - vm.initEditor(); - vm.$nextTick() - .then(() => { - expect(vm.getFileData).not.toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); - - it('is being initialised for files without content even if shouldHideEditor is `true`', (done) => { - vm.file.content = ''; - vm.file.raw = ''; + expect(vm.initEditor).not.toHaveBeenCalled(); + }); - vm.initEditor(); + it('calls initEditor when file key is changed', async () => { + expect(vm.initEditor).not.toHaveBeenCalled(); - vm.$nextTick() - .then(() => { - expect(vm.getFileData).toHaveBeenCalled(); - expect(vm.getRawFileData).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); + wrapper.setProps({ + file: { + ...vm.file, + key: 'new', + }, }); + await vm.$nextTick(); - it('does not initialize editor for files already with content', (done) => { - vm.file.content = 'foo'; - - vm.initEditor(); - vm.$nextTick() - .then(() => { - expect(vm.getFileData).not.toHaveBeenCalled(); - expect(vm.getRawFileData).not.toHaveBeenCalled(); - expect(vm.editor.createInstance).not.toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); - }); + expect(vm.initEditor).toHaveBeenCalled(); + }); + }); + + describe('populates editor with the fetched content', () => { + const createRemoteFile = (name) => ({ + ...file(name), + tmpFile: false, }); - describe('updates on file changes', () => { - beforeEach(() => { - jest.spyOn(vm, 'initEditor').mockImplementation(); - }); + beforeEach(async () => { + await createComponent(); + vm.getRawFileData.mockRestore(); + }); - it('calls removePendingTab when old file is pending', (done) => { - jest.spyOn(vm, 'shouldHideEditor', 'get').mockReturnValue(true); - jest.spyOn(vm, 'removePendingTab').mockImplementation(); + it('after switching viewer from edit to diff', async () => { 
+ const f = createRemoteFile('newFile'); + Vue.set(vm.$store.state.entries, f.path, f); - vm.file.pending = true; + jest.spyOn(service, 'getRawFileData').mockImplementation(async () => { + expect(vm.file.loading).toBe(true); - vm.$nextTick() - .then(() => { - vm.file = file('testing'); - vm.file.content = 'foo'; // need to prevent full cycle of initEditor + // switching from edit to diff mode usually triggers editor initialization + vm.$store.state.viewer = viewerTypes.diff; - return vm.$nextTick(); - }) - .then(() => { - expect(vm.removePendingTab).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); + // we delay returning the file to make sure editor doesn't initialize before we fetch file content + await waitUsingRealTimer(30); + return 'rawFileData123\n'; }); - it('does not call initEditor if the file did not change', (done) => { - Vue.set(vm, 'file', vm.file); - - vm.$nextTick() - .then(() => { - expect(vm.initEditor).not.toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); + wrapper.setProps({ + file: f, }); - it('calls initEditor when file key is changed', (done) => { - expect(vm.initEditor).not.toHaveBeenCalled(); + await waitForEditorSetup(); + expect(vm.model.getModel().getValue()).toBe('rawFileData123\n'); + }); - Vue.set(vm, 'file', { - ...vm.file, - key: 'new', + it('after opening multiple files at the same time', async () => { + const fileA = createRemoteFile('fileA'); + const aContent = 'fileA-rawContent\n'; + const bContent = 'fileB-rawContent\n'; + const fileB = createRemoteFile('fileB'); + Vue.set(vm.$store.state.entries, fileA.path, fileA); + Vue.set(vm.$store.state.entries, fileB.path, fileB); + + jest + .spyOn(service, 'getRawFileData') + .mockImplementation(async () => { + // opening fileB while the content of fileA is still being fetched + wrapper.setProps({ + file: fileB, + }); + return aContent; + }) + .mockImplementationOnce(async () => { + // we delay returning fileB content to make sure the editor doesn't initialize 
prematurely + await waitUsingRealTimer(30); + return bContent; }); - vm.$nextTick() - .then(() => { - expect(vm.initEditor).toHaveBeenCalled(); - }) - .then(done) - .catch(done.fail); + wrapper.setProps({ + file: fileA, }); - }); - describe('populates editor with the fetched content', () => { - beforeEach(() => { - vm.getRawFileData.mockRestore(); - }); + await waitForEditorSetup(); + expect(vm.model.getModel().getValue()).toBe(bContent); + }); + }); - const createRemoteFile = (name) => ({ - ...file(name), - tmpFile: false, + describe('onPaste', () => { + const setFileName = (name) => + createActiveFile({ + content: 'hello world\n', + name, + path: `foo/${name}`, + key: 'new', }); - it('after switching viewer from edit to diff', async () => { - jest.spyOn(service, 'getRawFileData').mockImplementation(async () => { - expect(vm.file.loading).toBe(true); - - // switching from edit to diff mode usually triggers editor initialization - store.state.viewer = viewerTypes.diff; + const pasteImage = () => { + window.dispatchEvent( + Object.assign(new Event('paste'), { + clipboardData: { + files: [new File(['foo'], 'foo.png', { type: 'image/png' })], + }, + }), + ); + }; - // we delay returning the file to make sure editor doesn't initialize before we fetch file content - await waitUsingRealTimer(30); - return 'rawFileData123\n'; + const watchState = (watched) => + new Promise((resolve) => { + const unwatch = vm.$store.watch(watched, () => { + unwatch(); + resolve(); }); - - const f = createRemoteFile('newFile'); - Vue.set(store.state.entries, f.path, f); - - vm.file = f; - - await waitForEditorSetup(); - expect(vm.model.getModel().getValue()).toBe('rawFileData123\n'); }); - it('after opening multiple files at the same time', async () => { - const fileA = createRemoteFile('fileA'); - const fileB = createRemoteFile('fileB'); - Vue.set(store.state.entries, fileA.path, fileA); - Vue.set(store.state.entries, fileB.path, fileB); - - jest - .spyOn(service, 'getRawFileData') - 
.mockImplementationOnce(async () => { - // opening fileB while the content of fileA is still being fetched - vm.file = fileB; - return 'fileA-rawContent\n'; - }) - .mockImplementationOnce(async () => { - // we delay returning fileB content to make sure the editor doesn't initialize prematurely - await waitUsingRealTimer(30); - return 'fileB-rawContent\n'; - }); + // Pasting an image does a lot of things like using the FileReader API, + // so, waitForPromises isn't very reliable (and causes a flaky spec) + // Read more about state.watch: https://vuex.vuejs.org/api/#watch + const waitForFileContentChange = () => watchState((s) => s.entries['foo/bar.md'].content); - vm.file = fileA; - - await waitForEditorSetup(); - expect(vm.model.getModel().getValue()).toBe('fileB-rawContent\n'); + beforeEach(async () => { + await createComponent({ + state: { + trees: { + 'gitlab-org/gitlab': { tree: [] }, + }, + currentProjectId: 'gitlab-org', + currentBranchId: 'gitlab', + }, + activeFile: setFileName('bar.md'), }); - }); - - describe('onPaste', () => { - const setFileName = (name) => { - Vue.set(vm, 'file', { - ...vm.file, - content: 'hello world\n', - name, - path: `foo/${name}`, - key: 'new', - }); - vm.$store.state.entries[vm.file.path] = vm.file; - }; + vm.setupEditor(); - const pasteImage = () => { - window.dispatchEvent( - Object.assign(new Event('paste'), { - clipboardData: { - files: [new File(['foo'], 'foo.png', { type: 'image/png' })], - }, - }), - ); - }; - - const watchState = (watched) => - new Promise((resolve) => { - const unwatch = vm.$store.watch(watched, () => { - unwatch(); - resolve(); - }); - }); + await waitForPromises(); + // set cursor to line 2, column 1 + vm.editor.setSelection(new Range(2, 1, 2, 1)); + vm.editor.focus(); - // Pasting an image does a lot of things like using the FileReader API, - // so, waitForPromises isn't very reliable (and causes a flaky spec) - // Read more about state.watch: https://vuex.vuejs.org/api/#watch - const 
waitForFileContentChange = () => watchState((s) => s.entries['foo/bar.md'].content); - - beforeEach(() => { - setFileName('bar.md'); - - vm.$store.state.trees['gitlab-org/gitlab'] = { tree: [] }; - vm.$store.state.currentProjectId = 'gitlab-org'; - vm.$store.state.currentBranchId = 'gitlab'; - - // create a new model each time, otherwise tests conflict with each other - // because of same model being used in multiple tests - Editor.editorInstance.modelManager.dispose(); - vm.setupEditor(); + jest.spyOn(vm.editor, 'hasTextFocus').mockReturnValue(true); + }); - return waitForPromises().then(() => { - // set cursor to line 2, column 1 - vm.editor.instance.setSelection(new Range(2, 1, 2, 1)); - vm.editor.instance.focus(); + it('adds an image entry to the same folder for a pasted image in a markdown file', async () => { + pasteImage(); - jest.spyOn(vm.editor.instance, 'hasTextFocus').mockReturnValue(true); - }); + await waitForFileContentChange(); + expect(vm.$store.state.entries['foo/foo.png']).toMatchObject({ + path: 'foo/foo.png', + type: 'blob', + content: 'Zm9v', + rawPath: 'data:image/png;base64,Zm9v', }); + }); - it('adds an image entry to the same folder for a pasted image in a markdown file', () => { - pasteImage(); - - return waitForFileContentChange().then(() => { - expect(vm.$store.state.entries['foo/foo.png']).toMatchObject({ - path: 'foo/foo.png', - type: 'blob', - content: 'Zm9v', - rawPath: 'data:image/png;base64,Zm9v', - }); - }); - }); + it("adds a markdown image tag to the file's contents", async () => { + pasteImage(); - it("adds a markdown image tag to the file's contents", () => { - pasteImage(); + await waitForFileContentChange(); + expect(vm.file.content).toBe('hello world\n![foo.png](./foo.png)'); + }); - return waitForFileContentChange().then(() => { - expect(vm.file.content).toBe('hello world\n![foo.png](./foo.png)'); - }); + it("does not add file to state or set markdown image syntax if the file isn't markdown", async () => { + 
wrapper.setProps({ + file: setFileName('myfile.txt'), }); + pasteImage(); - it("does not add file to state or set markdown image syntax if the file isn't markdown", () => { - setFileName('myfile.txt'); - pasteImage(); - - return waitForPromises().then(() => { - expect(vm.$store.state.entries['foo/foo.png']).toBeUndefined(); - expect(vm.file.content).toBe('hello world\n'); - }); - }); + await waitForPromises(); + expect(vm.$store.state.entries['foo/foo.png']).toBeUndefined(); + expect(vm.file.content).toBe('hello world\n'); }); }); describe('fetchEditorconfigRules', () => { - beforeEach(() => { - exampleConfigs.forEach(({ path, content }) => { - store.state.entries[path] = { ...file(), path, content }; - }); - }); - it.each(exampleFiles)( 'does not fetch content from remote for .editorconfig files present locally (case %#)', - ({ path, monacoRules }) => { - createOpenFile(path); - createComponent(); - - return waitForEditorSetup().then(() => { - expect(vm.rules).toEqual(monacoRules); - expect(vm.model.options).toMatchObject(monacoRules); - expect(vm.getFileData).not.toHaveBeenCalled(); - expect(vm.getRawFileData).not.toHaveBeenCalled(); + async ({ path, monacoRules }) => { + await createComponent({ + state: { + entries: (() => { + const res = {}; + exampleConfigs.forEach(({ path: configPath, content }) => { + res[configPath] = { ...file(), path: configPath, content }; + }); + return res; + })(), + }, + activeFile: createActiveFile({ + path, + key: path, + name: 'myfile.txt', + content: 'hello world', + }), }); + + expect(vm.rules).toEqual(monacoRules); + expect(vm.model.options).toMatchObject(monacoRules); + expect(vm.getFileData).not.toHaveBeenCalled(); + expect(vm.getRawFileData).not.toHaveBeenCalled(); }, ); - it('fetches content from remote for .editorconfig files not available locally', () => { - exampleConfigs.forEach(({ path }) => { - delete store.state.entries[path].content; - delete store.state.entries[path].raw; + it('fetches content from remote for 
.editorconfig files not available locally', async () => { + const activeFile = createActiveFile({ + path: 'foo/bar/baz/test/my_spec.js', + key: 'foo/bar/baz/test/my_spec.js', + name: 'myfile.txt', + content: 'hello world', + }); + + const expectations = [ + 'foo/bar/baz/.editorconfig', + 'foo/bar/.editorconfig', + 'foo/.editorconfig', + '.editorconfig', + ]; + + await createComponent({ + state: { + entries: (() => { + const res = { + [activeFile.path]: activeFile, + }; + exampleConfigs.forEach(({ path: configPath }) => { + const f = { ...file(), path: configPath }; + delete f.content; + delete f.raw; + res[configPath] = f; + }); + return res; + })(), + }, + activeFile, }); - // Include a "test" directory which does not exist in store. This one should be skipped. - createOpenFile('foo/bar/baz/test/my_spec.js'); - createComponent(); - - return waitForEditorSetup().then(() => { - expect(vm.getFileData.mock.calls.map(([args]) => args)).toEqual([ - { makeFileActive: false, path: 'foo/bar/baz/.editorconfig' }, - { makeFileActive: false, path: 'foo/bar/.editorconfig' }, - { makeFileActive: false, path: 'foo/.editorconfig' }, - { makeFileActive: false, path: '.editorconfig' }, - ]); - expect(vm.getRawFileData.mock.calls.map(([args]) => args)).toEqual([ - { path: 'foo/bar/baz/.editorconfig' }, - { path: 'foo/bar/.editorconfig' }, - { path: 'foo/.editorconfig' }, - { path: '.editorconfig' }, - ]); - }); + expect(service.getFileData.mock.calls.map(([args]) => args)).toEqual( + expectations.map((expectation) => expect.stringContaining(expectation)), + ); + expect(service.getRawFileData.mock.calls.map(([args]) => args)).toEqual( + expectations.map((expectation) => expect.objectContaining({ path: expectation })), + ); }); }); }); diff --git a/spec/frontend/ide/components/repo_tab_spec.js b/spec/frontend/ide/components/repo_tab_spec.js index b39a488b034..95d52e8f7a9 100644 --- a/spec/frontend/ide/components/repo_tab_spec.js +++ b/spec/frontend/ide/components/repo_tab_spec.js @@ 
-1,5 +1,7 @@ +import { GlTab } from '@gitlab/ui'; import { mount, createLocalVue } from '@vue/test-utils'; import Vuex from 'vuex'; +import { stubComponent } from 'helpers/stub_component'; import RepoTab from '~/ide/components/repo_tab.vue'; import { createRouter } from '~/ide/ide_router'; import { createStore } from '~/ide/stores'; @@ -8,16 +10,25 @@ import { file } from '../helpers'; const localVue = createLocalVue(); localVue.use(Vuex); +const GlTabStub = stubComponent(GlTab, { + template: '<li><slot name="title" /></li>', +}); + describe('RepoTab', () => { let wrapper; let store; let router; + const findTab = () => wrapper.find(GlTabStub); + function createComponent(propsData) { wrapper = mount(RepoTab, { localVue, store, propsData, + stubs: { + GlTab: GlTabStub, + }, }); } @@ -55,7 +66,7 @@ describe('RepoTab', () => { jest.spyOn(wrapper.vm, 'openPendingTab').mockImplementation(() => {}); - await wrapper.trigger('click'); + await findTab().vm.$emit('click'); expect(wrapper.vm.openPendingTab).not.toHaveBeenCalled(); }); @@ -67,7 +78,7 @@ describe('RepoTab', () => { jest.spyOn(wrapper.vm, 'clickFile').mockImplementation(() => {}); - wrapper.trigger('click'); + findTab().vm.$emit('click'); expect(wrapper.vm.clickFile).toHaveBeenCalledWith(wrapper.vm.tab); }); @@ -91,11 +102,11 @@ describe('RepoTab', () => { tab, }); - await wrapper.trigger('mouseover'); + await findTab().vm.$emit('mouseover'); expect(wrapper.find('.file-modified').exists()).toBe(false); - await wrapper.trigger('mouseout'); + await findTab().vm.$emit('mouseout'); expect(wrapper.find('.file-modified').exists()).toBe(true); }); diff --git a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js index cdef4b1ee62..9811532126f 100644 --- a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js +++ 
b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js @@ -75,6 +75,33 @@ describe('import table row', () => { }); }); + it('renders only namespaces if user cannot create new group', () => { + createComponent({ + canCreateGroup: false, + group: getFakeGroup(STATUSES.NONE), + }); + + const dropdownData = findNamespaceDropdown().props().options.data; + const noParentOption = dropdownData.find((o) => o.text === 'No parent'); + + expect(noParentOption).toBeUndefined(); + expect(dropdownData).toHaveLength(availableNamespacesFixture.length); + }); + + it('renders no parent option in available namespaces if user can create new group', () => { + createComponent({ + canCreateGroup: true, + group: getFakeGroup(STATUSES.NONE), + }); + + const dropdownData = findNamespaceDropdown().props().options.data; + const noParentOption = dropdownData.find((o) => o.text === 'No parent'); + const existingGroupOption = dropdownData.find((o) => o.text === 'Existing groups'); + + expect(noParentOption.id).toBe(''); + expect(existingGroupOption.children).toHaveLength(availableNamespacesFixture.length); + }); + describe('when entity status is SCHEDULING', () => { beforeEach(() => { group = getFakeGroup(STATUSES.SCHEDULING); diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js index dd734782169..4b9fac96380 100644 --- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js +++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js @@ -21,9 +21,13 @@ describe('import table', () => { let apolloProvider; const FAKE_GROUP = generateFakeEntry({ id: 1, status: STATUSES.NONE }); + const FAKE_GROUPS = [ + generateFakeEntry({ id: 1, status: STATUSES.NONE }), + generateFakeEntry({ id: 2, status: STATUSES.FINISHED }), + ]; const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 }; - const createComponent = 
({ bulkImportSourceGroups }) => { + const createComponent = ({ bulkImportSourceGroups, canCreateGroup }) => { apolloProvider = createMockApollo([], { Query: { availableNamespaces: () => availableNamespacesFixture, @@ -39,6 +43,7 @@ describe('import table', () => { wrapper = shallowMount(ImportTable, { propsData: { sourceUrl: 'https://demo.host', + canCreateGroup, }, stubs: { GlSprintf, @@ -80,14 +85,10 @@ describe('import table', () => { }); await waitForPromises(); - expect(wrapper.find(GlEmptyState).props().title).toBe('No groups available for import'); + expect(wrapper.find(GlEmptyState).props().title).toBe('You have no groups to import'); }); it('renders import row for each group in response', async () => { - const FAKE_GROUPS = [ - generateFakeEntry({ id: 1, status: STATUSES.NONE }), - generateFakeEntry({ id: 2, status: STATUSES.FINISHED }), - ]; createComponent({ bulkImportSourceGroups: () => ({ nodes: FAKE_GROUPS, @@ -99,6 +100,25 @@ describe('import table', () => { expect(wrapper.findAll(ImportTableRow)).toHaveLength(FAKE_GROUPS.length); }); + it.each` + canCreateGroup | userPermissions + ${true} | ${'user can create new top-level group'} + ${false} | ${'user cannot create new top-level group'} + `('correctly passes canCreateGroup to rows when $userPermissions', async ({ canCreateGroup }) => { + createComponent({ + bulkImportSourceGroups: () => ({ + nodes: FAKE_GROUPS, + pageInfo: FAKE_PAGE_INFO, + }), + canCreateGroup, + }); + await waitForPromises(); + + wrapper.findAllComponents(ImportTableRow).wrappers.forEach((w) => { + expect(w.props().canCreateGroup).toBe(canCreateGroup); + }); + }); + it('does not render status string when result list is empty', async () => { createComponent({ bulkImportSourceGroups: jest.fn().mockResolvedValue({ diff --git a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap index 82d7f691efd..6a4999c42db 100644 --- 
a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap +++ b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap @@ -35,7 +35,7 @@ exports[`Alert integration settings form default state should match the default Incident template (optional) <gl-link-stub - href="/help/user/project/description_templates#creating-issue-templates" + href="/help/user/project/description_templates#create-an-issue-template" target="_blank" > <gl-icon-stub diff --git a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js index c6e7ee44355..6a8ab02a69a 100644 --- a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js +++ b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js @@ -30,14 +30,21 @@ describe('JiraTriggerFields', () => { const findCommentSettings = () => wrapper.find('[data-testid="comment-settings"]'); const findCommentDetail = () => wrapper.find('[data-testid="comment-detail"]'); const findCommentSettingsCheckbox = () => findCommentSettings().find(GlFormCheckbox); + const findIssueTransitionSettings = () => + wrapper.find('[data-testid="issue-transition-settings"]'); + const findIssueTransitionModeRadios = () => + findIssueTransitionSettings().findAll('input[type="radio"]'); + const findIssueTransitionIdsField = () => + wrapper.find('input[type="text"][name="service[jira_issue_transition_id]"]'); describe('template', () => { describe('initialTriggerCommit and initialTriggerMergeRequest are false', () => { - it('does not show comment settings', () => { + it('does not show trigger settings', () => { createComponent(); expect(findCommentSettings().isVisible()).toBe(false); expect(findCommentDetail().isVisible()).toBe(false); + expect(findIssueTransitionSettings().isVisible()).toBe(false); }); }); @@ -48,9 +55,10 @@ describe('JiraTriggerFields', () => { }); }); - it('shows comment settings', () => 
{ + it('shows trigger settings', () => { expect(findCommentSettings().isVisible()).toBe(true); expect(findCommentDetail().isVisible()).toBe(false); + expect(findIssueTransitionSettings().isVisible()).toBe(true); }); // As per https://vuejs.org/v2/guide/forms.html#Checkbox-1, @@ -73,13 +81,14 @@ describe('JiraTriggerFields', () => { }); describe('initialTriggerMergeRequest is true', () => { - it('shows comment settings', () => { + it('shows trigger settings', () => { createComponent({ initialTriggerMergeRequest: true, }); expect(findCommentSettings().isVisible()).toBe(true); expect(findCommentDetail().isVisible()).toBe(false); + expect(findIssueTransitionSettings().isVisible()).toBe(true); }); }); @@ -95,7 +104,41 @@ describe('JiraTriggerFields', () => { }); }); - it('disables checkboxes and radios if inheriting', () => { + describe('initialJiraIssueTransitionId is not set', () => { + it('uses automatic transitions', () => { + createComponent({ + initialTriggerCommit: true, + }); + + const [radio1, radio2] = findIssueTransitionModeRadios().wrappers; + expect(radio1.element.checked).toBe(true); + expect(radio2.element.checked).toBe(false); + + expect(findIssueTransitionIdsField().exists()).toBe(false); + }); + }); + + describe('initialJiraIssueTransitionId is set', () => { + it('uses custom transitions', () => { + createComponent({ + initialJiraIssueTransitionId: '1, 2, 3', + initialTriggerCommit: true, + }); + + const [radio1, radio2] = findIssueTransitionModeRadios().wrappers; + expect(radio1.element.checked).toBe(false); + expect(radio2.element.checked).toBe(true); + + const field = findIssueTransitionIdsField(); + expect(field.isVisible()).toBe(true); + expect(field.element).toMatchObject({ + type: 'text', + value: '1, 2, 3', + }); + }); + }); + + it('disables input fields if inheriting', () => { createComponent( { initialTriggerCommit: true, @@ -104,12 +147,8 @@ describe('JiraTriggerFields', () => { true, ); - 
wrapper.findAll('[type=checkbox]').wrappers.forEach((checkbox) => { - expect(checkbox.attributes('disabled')).toBe('disabled'); - }); - - wrapper.findAll('[type=radio]').wrappers.forEach((radio) => { - expect(radio.attributes('disabled')).toBe('disabled'); + wrapper.findAll('[type=text], [type=checkbox], [type=radio]').wrappers.forEach((input) => { + expect(input.attributes('disabled')).toBe('disabled'); }); }); }); diff --git a/spec/frontend/invite_members/components/group_select_spec.js b/spec/frontend/invite_members/components/group_select_spec.js new file mode 100644 index 00000000000..2a6985de136 --- /dev/null +++ b/spec/frontend/invite_members/components/group_select_spec.js @@ -0,0 +1,90 @@ +import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui'; +import { mount } from '@vue/test-utils'; +import waitForPromises from 'helpers/wait_for_promises'; +import Api from '~/api'; +import GroupSelect from '~/invite_members/components/group_select.vue'; + +const createComponent = () => { + return mount(GroupSelect, {}); +}; + +const group1 = { id: 1, full_name: 'Group One' }; +const group2 = { id: 2, full_name: 'Group Two' }; +const allGroups = [group1, group2]; + +describe('GroupSelect', () => { + let wrapper; + + beforeEach(() => { + jest.spyOn(Api, 'groups').mockResolvedValue(allGroups); + + wrapper = createComponent(); + }); + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + }); + + const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType); + const findDropdown = () => wrapper.findComponent(GlDropdown); + const findDropdownToggle = () => findDropdown().find('button[aria-haspopup="true"]'); + const findDropdownItemByText = (text) => + wrapper + .findAllComponents(GlDropdownItem) + .wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.text() === text); + + it('renders GlSearchBoxByType with default attributes', () => { + expect(findSearchBoxByType().exists()).toBe(true); + 
expect(findSearchBoxByType().vm.$attrs).toMatchObject({ + placeholder: 'Search groups', + }); + }); + + describe('when user types in the search input', () => { + let resolveApiRequest; + + beforeEach(() => { + jest.spyOn(Api, 'groups').mockImplementation( + () => + new Promise((resolve) => { + resolveApiRequest = resolve; + }), + ); + + findSearchBoxByType().vm.$emit('input', group1.name); + }); + + it('calls the API', () => { + resolveApiRequest({ data: allGroups }); + + expect(Api.groups).toHaveBeenCalledWith(group1.name, { + active: true, + exclude_internal: true, + }); + }); + + it('displays loading icon while waiting for API call to resolve', async () => { + expect(findSearchBoxByType().props('isLoading')).toBe(true); + + resolveApiRequest({ data: allGroups }); + await waitForPromises(); + + expect(findSearchBoxByType().props('isLoading')).toBe(false); + }); + }); + + describe('when group is selected from the dropdown', () => { + beforeEach(() => { + findDropdownItemByText(group1.full_name).vm.$emit('click'); + }); + + it('emits `input` event used by `v-model`', () => { + expect(wrapper.emitted('input')[0][0].id).toEqual(group1.id); + }); + + it('sets dropdown toggle text to selected item', () => { + expect(findDropdownToggle().text()).toBe(group1.full_name); + }); + }); +}); diff --git a/spec/frontend/invite_members/components/invite_group_trigger_spec.js b/spec/frontend/invite_members/components/invite_group_trigger_spec.js new file mode 100644 index 00000000000..cb9967ebe8c --- /dev/null +++ b/spec/frontend/invite_members/components/invite_group_trigger_spec.js @@ -0,0 +1,50 @@ +import { GlButton } from '@gitlab/ui'; +import { mount } from '@vue/test-utils'; +import InviteGroupTrigger from '~/invite_members/components/invite_group_trigger.vue'; +import eventHub from '~/invite_members/event_hub'; + +const displayText = 'Invite a group'; + +const createComponent = (props = {}) => { + return mount(InviteGroupTrigger, { + propsData: { + displayText, + ...props, 
+ }, + }); +}; + +describe('InviteGroupTrigger', () => { + let wrapper; + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + }); + + const findButton = () => wrapper.findComponent(GlButton); + + describe('displayText', () => { + beforeEach(() => { + wrapper = createComponent(); + }); + + it('includes the correct displayText for the link', () => { + expect(findButton().text()).toBe(displayText); + }); + }); + + describe('when button is clicked', () => { + beforeEach(() => { + eventHub.$emit = jest.fn(); + + wrapper = createComponent(); + + findButton().trigger('click'); + }); + + it('emits event that triggers opening the modal', () => { + expect(eventHub.$emit).toHaveBeenLastCalledWith('openModal', { inviteeType: 'group' }); + }); + }); +}); diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js index e310a00133c..ca086549f3f 100644 --- a/spec/frontend/invite_members/components/invite_members_modal_spec.js +++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js @@ -6,8 +6,9 @@ import Api from '~/api'; import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue'; const id = '1'; -const name = 'testgroup'; +const name = 'test name'; const isProject = false; +const inviteeType = 'members'; const accessLevels = { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 }; const defaultAccessLevel = '10'; const helpLink = 'https://example.com'; @@ -20,16 +21,19 @@ const user3 = { username: 'one_2', avatar_url: '', }; +const sharedGroup = { id: '981' }; -const createComponent = (data = {}) => { +const createComponent = (data = {}, props = {}) => { return shallowMount(InviteMembersModal, { propsData: { id, name, isProject, + inviteeType, accessLevels, defaultAccessLevel, helpLink, + ...props, }, data() { return data; @@ -46,6 +50,22 @@ const createComponent = (data = {}) => { }); }; +const 
createInviteMembersToProjectWrapper = () => { + return createComponent({ inviteeType: 'members' }, { isProject: true }); +}; + +const createInviteMembersToGroupWrapper = () => { + return createComponent({ inviteeType: 'members' }, { isProject: false }); +}; + +const createInviteGroupToProjectWrapper = () => { + return createComponent({ inviteeType: 'group' }, { isProject: true }); +}; + +const createInviteGroupToGroupWrapper = () => { + return createComponent({ inviteeType: 'group' }, { isProject: false }); +}; + describe('InviteMembersModal', () => { let wrapper; @@ -54,12 +74,13 @@ describe('InviteMembersModal', () => { wrapper = null; }); - const findDropdown = () => wrapper.find(GlDropdown); - const findDropdownItems = () => findDropdown().findAll(GlDropdownItem); - const findDatepicker = () => wrapper.find(GlDatepicker); - const findLink = () => wrapper.find(GlLink); - const findCancelButton = () => wrapper.find({ ref: 'cancelButton' }); - const findInviteButton = () => wrapper.find({ ref: 'inviteButton' }); + const findDropdown = () => wrapper.findComponent(GlDropdown); + const findDropdownItems = () => findDropdown().findAllComponents(GlDropdownItem); + const findDatepicker = () => wrapper.findComponent(GlDatepicker); + const findLink = () => wrapper.findComponent(GlLink); + const findIntroText = () => wrapper.find({ ref: 'introText' }).text(); + const findCancelButton = () => wrapper.findComponent({ ref: 'cancelButton' }); + const findInviteButton = () => wrapper.findComponent({ ref: 'inviteButton' }); const clickInviteButton = () => findInviteButton().vm.$emit('click'); describe('rendering the modal', () => { @@ -68,7 +89,7 @@ describe('InviteMembersModal', () => { }); it('renders the modal with the correct title', () => { - expect(wrapper.find(GlModal).props('title')).toBe('Invite team members'); + expect(wrapper.findComponent(GlModal).props('title')).toBe('Invite team members'); }); it('renders the Cancel button text correctly', () => { @@ -102,6 +123,44 
@@ describe('InviteMembersModal', () => { }); }); + describe('displaying the correct introText', () => { + describe('when inviting to a project', () => { + describe('when inviting members', () => { + it('includes the correct invitee, type, and formatted name', () => { + wrapper = createInviteMembersToProjectWrapper(); + + expect(findIntroText()).toBe("You're inviting members to the TEST NAME project"); + }); + }); + + describe('when sharing with a group', () => { + it('includes the correct invitee, type, and formatted name', () => { + wrapper = createInviteGroupToProjectWrapper(); + + expect(findIntroText()).toBe("You're inviting a group to the TEST NAME project"); + }); + }); + }); + + describe('when inviting to a group', () => { + describe('when inviting members', () => { + it('includes the correct invitee, type, and formatted name', () => { + wrapper = createInviteMembersToGroupWrapper(); + + expect(wrapper.html()).toContain("You're inviting members to the TEST NAME group"); + }); + }); + + describe('when sharing with a group', () => { + it('includes the correct invitee, type, and formatted name', () => { + wrapper = createInviteGroupToGroupWrapper(); + + expect(wrapper.html()).toContain("You're inviting a group to the TEST NAME group"); + }); + }); + }); + }); + describe('submitting the invite form', () => { const apiErrorMessage = 'Member already exists'; @@ -115,8 +174,9 @@ describe('InviteMembersModal', () => { describe('when invites are sent successfully', () => { beforeEach(() => { - wrapper = createComponent({ newUsersToInvite: [user1] }); + wrapper = createInviteMembersToGroupWrapper(); + wrapper.setData({ newUsersToInvite: [user1] }); wrapper.vm.$toast = { show: jest.fn() }; jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData }); jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); @@ -283,5 +343,58 @@ describe('InviteMembersModal', () => { }); }); }); + + describe('when inviting a group to share', () => { + describe('when 
sharing the group is successful', () => { + const groupPostData = { + group_id: sharedGroup.id, + group_access: '10', + expires_at: undefined, + format: 'json', + }; + + beforeEach(() => { + wrapper = createComponent({ groupToBeSharedWith: sharedGroup }); + + wrapper.setData({ inviteeType: 'group' }); + wrapper.vm.$toast = { show: jest.fn() }; + jest.spyOn(Api, 'groupShareWithGroup').mockResolvedValue({ data: groupPostData }); + jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); + + clickInviteButton(); + }); + + it('calls Api groupShareWithGroup with the correct params', () => { + expect(Api.groupShareWithGroup).toHaveBeenCalledWith(id, groupPostData); + }); + + it('displays the successful toastMessage', () => { + expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled(); + }); + }); + + describe('when sharing the group fails', () => { + beforeEach(() => { + wrapper = createComponent({ groupToBeSharedWith: sharedGroup }); + + wrapper.setData({ inviteeType: 'group' }); + wrapper.vm.$toast = { show: jest.fn() }; + + jest + .spyOn(Api, 'groupShareWithGroup') + .mockRejectedValue({ response: { data: { success: false } } }); + + jest.spyOn(wrapper.vm, 'showToastMessageError'); + + clickInviteButton(); + }); + + it('displays the generic error toastMessage', async () => { + await waitForPromises(); + + expect(wrapper.vm.showToastMessageError).toHaveBeenCalled(); + }); + }); + }); }); }); diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js index 18d6662d2d4..8beadf12a36 100644 --- a/spec/frontend/invite_members/components/invite_members_trigger_spec.js +++ b/spec/frontend/invite_members/components/invite_members_trigger_spec.js @@ -23,7 +23,7 @@ describe('InviteMembersTrigger', () => { }); describe('displayText', () => { - const findLink = () => wrapper.find(GlLink); + const findLink = () => wrapper.findComponent(GlLink); beforeEach(() => { wrapper = 
createComponent(); @@ -35,7 +35,7 @@ describe('InviteMembersTrigger', () => { }); describe('icon', () => { - const findIcon = () => wrapper.find(GlIcon); + const findIcon = () => wrapper.findComponent(GlIcon); it('includes the correct icon when an icon is sent', () => { wrapper = createComponent({ icon }); diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js index a945b99bd54..f6e79d3607f 100644 --- a/spec/frontend/invite_members/components/members_token_select_spec.js +++ b/spec/frontend/invite_members/components/members_token_select_spec.js @@ -37,7 +37,7 @@ describe('MembersTokenSelect', () => { wrapper = null; }); - const findTokenSelector = () => wrapper.find(GlTokenSelector); + const findTokenSelector = () => wrapper.findComponent(GlTokenSelector); describe('rendering the token-selector component', () => { it('renders with the correct props', () => { diff --git a/spec/frontend/issue_show/components/app_spec.js b/spec/frontend/issue_show/components/app_spec.js index 9e1bc8242fe..d5d1e1d9cf8 100644 --- a/spec/frontend/issue_show/components/app_spec.js +++ b/spec/frontend/issue_show/components/app_spec.js @@ -422,7 +422,18 @@ describe('Issuable output', () => { formSpy = jest.spyOn(wrapper.vm, 'updateAndShowForm'); }); - it('shows the form if template names request is successful', () => { + it('shows the form if template names as hash request is successful', () => { + const mockData = { + test: [{ name: 'test', id: 'test', project_path: '/', namespace_path: '/' }], + }; + mock.onGet('/issuable-templates-path').reply(() => Promise.resolve([200, mockData])); + + return wrapper.vm.requestTemplatesAndShowForm().then(() => { + expect(formSpy).toHaveBeenCalledWith(mockData); + }); + }); + + it('shows the form if template names as array request is successful', () => { const mockData = [{ name: 'test', id: 'test', project_path: '/', namespace_path: '/' }]; 
mock.onGet('/issuable-templates-path').reply(() => Promise.resolve([200, mockData])); diff --git a/spec/frontend/issue_show/components/fields/description_template_spec.js b/spec/frontend/issue_show/components/fields/description_template_spec.js index 1193d4f8add..dc126c53f5e 100644 --- a/spec/frontend/issue_show/components/fields/description_template_spec.js +++ b/spec/frontend/issue_show/components/fields/description_template_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; import descriptionTemplate from '~/issue_show/components/fields/description_template.vue'; -describe('Issue description template component', () => { +describe('Issue description template component with templates as hash', () => { let vm; let formState; @@ -14,7 +14,9 @@ describe('Issue description template component', () => { vm = new Component({ propsData: { formState, - issuableTemplates: [{ name: 'test', id: 'test', project_path: '/', namespace_path: '/' }], + issuableTemplates: { + test: [{ name: 'test', id: 'test', project_path: '/', namespace_path: '/' }], + }, projectId: 1, projectPath: '/', namespacePath: '/', @@ -23,9 +25,9 @@ describe('Issue description template component', () => { }).$mount(); }); - it('renders templates as JSON array in data attribute', () => { + it('renders templates as JSON hash in data attribute', () => { expect(vm.$el.querySelector('.js-issuable-selector').getAttribute('data-data')).toBe( - '[{"name":"test","id":"test","project_path":"/","namespace_path":"/"}]', + '{"test":[{"name":"test","id":"test","project_path":"/","namespace_path":"/"}]}', ); }); @@ -41,3 +43,32 @@ describe('Issue description template component', () => { expect(vm.issuableTemplate.editor.getValue()).toBe('testing new template'); }); }); + +describe('Issue description template component with templates as array', () => { + let vm; + let formState; + + beforeEach(() => { + const Component = Vue.extend(descriptionTemplate); + formState = { + description: 'test', + }; + + vm = new Component({ + 
propsData: { + formState, + issuableTemplates: [{ name: 'test', id: 'test', project_path: '/', namespace_path: '/' }], + projectId: 1, + projectPath: '/', + namespacePath: '/', + projectNamespace: '/', + }, + }).$mount(); + }); + + it('renders templates as JSON array in data attribute', () => { + expect(vm.$el.querySelector('.js-issuable-selector').getAttribute('data-data')).toBe( + '[{"name":"test","id":"test","project_path":"/","namespace_path":"/"}]', + ); + }); +}); diff --git a/spec/frontend/issue_show/components/form_spec.js b/spec/frontend/issue_show/components/form_spec.js index 4a8ec3cf66a..fc2e224ad92 100644 --- a/spec/frontend/issue_show/components/form_spec.js +++ b/spec/frontend/issue_show/components/form_spec.js @@ -42,7 +42,7 @@ describe('Inline edit form component', () => { expect(vm.$el.querySelector('.js-issuable-selector-wrap')).toBeNull(); }); - it('renders template selector when templates exists', () => { + it('renders template selector when templates as array exists', () => { createComponent({ issuableTemplates: [ { name: 'test', id: 'test', project_path: 'test', namespace_path: 'test' }, @@ -52,6 +52,16 @@ describe('Inline edit form component', () => { expect(vm.$el.querySelector('.js-issuable-selector-wrap')).not.toBeNull(); }); + it('renders template selector when templates as hash exists', () => { + createComponent({ + issuableTemplates: { + test: [{ name: 'test', id: 'test', project_path: 'test', namespace_path: 'test' }], + }, + }); + + expect(vm.$el.querySelector('.js-issuable-selector-wrap')).not.toBeNull(); + }); + it('hides locked warning by default', () => { createComponent(); diff --git a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js index b794d0c571e..400ac2e8f85 100644 --- a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js +++ b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js @@ -188,7 +188,7 @@ 
describe('dashboard invalid url parameters', () => { }); describe('when there is an error', () => { - const mockError = 'an error ocurred!'; + const mockError = 'an error occurred!'; beforeEach(() => { store.commit(`monitoringDashboard/${types.RECEIVE_PANEL_PREVIEW_FAILURE}`, mockError); diff --git a/spec/frontend/monitoring/requests/index_spec.js b/spec/frontend/monitoring/requests/index_spec.js index b30b1e60575..03bf5d70153 100644 --- a/spec/frontend/monitoring/requests/index_spec.js +++ b/spec/frontend/monitoring/requests/index_spec.js @@ -94,7 +94,7 @@ describe('monitoring metrics_requests', () => { it('rejects after getting an HTTP 500 error', () => { mock.onGet(prometheusEndpoint).reply(500, { status: 'error', - error: 'An error ocurred', + error: 'An error occurred', }); return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => { @@ -106,7 +106,7 @@ describe('monitoring metrics_requests', () => { // Mock multiple attempts while the cache is filling up and fails mock.onGet(prometheusEndpoint).reply(statusCodes.UNAUTHORIZED, { status: 'error', - error: 'An error ocurred', + error: 'An error occurred', }); return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => { @@ -120,7 +120,7 @@ describe('monitoring metrics_requests', () => { mock.onGet(prometheusEndpoint).replyOnce(statusCodes.NO_CONTENT); mock.onGet(prometheusEndpoint).reply(500, { status: 'error', - error: 'An error ocurred', + error: 'An error occurred', }); // 3rd attempt return getPrometheusQueryData(prometheusEndpoint, params).catch((error) => { diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js index 03e5842bb0f..c6a7d7ead98 100644 --- a/spec/frontend/notes/components/discussion_actions_spec.js +++ b/spec/frontend/notes/components/discussion_actions_spec.js @@ -96,7 +96,7 @@ describe('DiscussionActions', () => { it('emits showReplyForm event when clicking on reply placeholder', () => 
{ jest.spyOn(wrapper.vm, '$emit'); - wrapper.find(ReplyPlaceholder).find('button').trigger('click'); + wrapper.find(ReplyPlaceholder).find('textarea').trigger('focus'); expect(wrapper.vm.$emit).toHaveBeenCalledWith('showReplyForm'); }); diff --git a/spec/frontend/notes/components/discussion_reply_placeholder_spec.js b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js index b7b7ec08867..2a4cd0df0c7 100644 --- a/spec/frontend/notes/components/discussion_reply_placeholder_spec.js +++ b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js @@ -1,17 +1,17 @@ import { shallowMount } from '@vue/test-utils'; import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue'; -const buttonText = 'Test Button Text'; +const placeholderText = 'Test Button Text'; describe('ReplyPlaceholder', () => { let wrapper; - const findButton = () => wrapper.find({ ref: 'button' }); + const findTextarea = () => wrapper.find({ ref: 'textarea' }); beforeEach(() => { wrapper = shallowMount(ReplyPlaceholder, { propsData: { - buttonText, + placeholderText, }, }); }); @@ -20,17 +20,17 @@ describe('ReplyPlaceholder', () => { wrapper.destroy(); }); - it('emits onClick event on button click', () => { - findButton().trigger('click'); + it('emits focus event on button click', () => { + findTextarea().trigger('focus'); return wrapper.vm.$nextTick().then(() => { expect(wrapper.emitted()).toEqual({ - onClick: [[]], + focus: [[]], }); }); }); it('should render reply button', () => { - expect(findButton().text()).toEqual(buttonText); + expect(findTextarea().attributes('placeholder')).toEqual(placeholderText); }); }); diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js index 87538279c3d..34df39bf1c7 100644 --- a/spec/frontend/notes/components/noteable_discussion_spec.js +++ b/spec/frontend/notes/components/noteable_discussion_spec.js @@ -65,7 +65,7 @@ 
describe('noteable_discussion component', () => { expect(wrapper.vm.isReplying).toEqual(false); const replyPlaceholder = wrapper.find(ReplyPlaceholder); - replyPlaceholder.vm.$emit('onClick'); + replyPlaceholder.vm.$emit('focus'); await nextTick(); expect(wrapper.vm.isReplying).toEqual(true); diff --git a/spec/frontend/notifications/components/custom_notifications_modal_spec.js b/spec/frontend/notifications/components/custom_notifications_modal_spec.js index 3e87f3107bd..5e4114d91f5 100644 --- a/spec/frontend/notifications/components/custom_notifications_modal_spec.js +++ b/spec/frontend/notifications/components/custom_notifications_modal_spec.js @@ -180,7 +180,7 @@ describe('CustomNotificationsModal', () => { expect( mockToastShow, ).toHaveBeenCalledWith( - 'An error occured while loading the notification settings. Please try again.', + 'An error occurred while loading the notification settings. Please try again.', { type: 'error' }, ); }); @@ -258,7 +258,7 @@ describe('CustomNotificationsModal', () => { expect( mockToastShow, ).toHaveBeenCalledWith( - 'An error occured while updating the notification settings. Please try again.', + 'An error occurred while updating the notification settings. Please try again.', { type: 'error' }, ); }); diff --git a/spec/frontend/notifications/components/notifications_dropdown_spec.js b/spec/frontend/notifications/components/notifications_dropdown_spec.js index 0673fb51a91..88534a6d690 100644 --- a/spec/frontend/notifications/components/notifications_dropdown_spec.js +++ b/spec/frontend/notifications/components/notifications_dropdown_spec.js @@ -255,7 +255,7 @@ describe('NotificationsDropdown', () => { expect( mockToastShow, ).toHaveBeenCalledWith( - 'An error occured while updating the notification settings. Please try again.', + 'An error occurred while updating the notification settings. 
Please try again.', { type: 'error' }, ); }); diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js index 07c120f57f7..4642dbc1ea6 100644 --- a/spec/frontend/packages/details/store/getters_spec.js +++ b/spec/frontend/packages/details/store/getters_spec.js @@ -99,7 +99,7 @@ describe('Getters PackageDetails Store', () => { packageEntity | expectedResult ${conanPackage} | ${'Conan'} ${packageWithoutBuildInfo} | ${'Maven'} - ${npmPackage} | ${'NPM'} + ${npmPackage} | ${'npm'} ${nugetPackage} | ${'NuGet'} ${pypiPackage} | ${'PyPI'} `(`package type`, ({ packageEntity, expectedResult }) => { @@ -168,13 +168,13 @@ describe('Getters PackageDetails Store', () => { }); describe('npm string getters', () => { - it('gets the correct npmInstallationCommand for NPM', () => { + it('gets the correct npmInstallationCommand for npm', () => { setupState({ packageEntity: npmPackage }); expect(npmInstallationCommand(state)(NpmManager.NPM)).toBe(npmInstallStr); }); - it('gets the correct npmSetupCommand for NPM', () => { + it('gets the correct npmSetupCommand for npm', () => { setupState({ packageEntity: npmPackage }); expect(npmSetupCommand(state)(NpmManager.NPM)).toBe(npmSetupStr); diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js index 506f37f8895..4a95def1bef 100644 --- a/spec/frontend/packages/shared/utils_spec.js +++ b/spec/frontend/packages/shared/utils_spec.js @@ -35,7 +35,7 @@ describe('Packages shared utils', () => { packageType | expectedResult ${'conan'} | ${'Conan'} ${'maven'} | ${'Maven'} - ${'npm'} | ${'NPM'} + ${'npm'} | ${'npm'} ${'nuget'} | ${'NuGet'} ${'pypi'} | ${'PyPI'} ${'composer'} | ${'Composer'} diff --git a/spec/frontend/performance_bar/components/performance_bar_app_spec.js b/spec/frontend/performance_bar/components/performance_bar_app_spec.js index 417a655093c..67a4259a8e3 100644 --- 
a/spec/frontend/performance_bar/components/performance_bar_app_spec.js +++ b/spec/frontend/performance_bar/components/performance_bar_app_spec.js @@ -9,6 +9,7 @@ describe('performance bar app', () => { store, env: 'development', requestId: '123', + statsUrl: 'https://log.gprd.gitlab.net/app/dashboards#/view/', peekUrl: '/-/peek/results', profileUrl: '?lineprofiler=true', }, diff --git a/spec/frontend/performance_bar/index_spec.js b/spec/frontend/performance_bar/index_spec.js index 8d9c32b7f12..819b2bcbacf 100644 --- a/spec/frontend/performance_bar/index_spec.js +++ b/spec/frontend/performance_bar/index_spec.js @@ -19,6 +19,7 @@ describe('performance bar wrapper', () => { peekWrapper.setAttribute('data-env', 'development'); peekWrapper.setAttribute('data-request-id', '123'); peekWrapper.setAttribute('data-peek-url', '/-/peek/results'); + peekWrapper.setAttribute('data-stats-url', 'https://log.gprd.gitlab.net/app/dashboards#/view/'); peekWrapper.setAttribute('data-profile-url', '?lineprofiler=true'); mock = new MockAdapter(axios); diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js index df15a6c8e7f..5eeccd78265 100644 --- a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js +++ b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js @@ -1,14 +1,24 @@ import { shallowMount } from '@vue/test-utils'; import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue'; +import PipelineStatus from '~/pipeline_editor/components/header/pipeline_status.vue'; import ValidationSegment from '~/pipeline_editor/components/header/validation_segment.vue'; import { mockLintResponse } from '../../mock_data'; describe('Pipeline editor header', () => { let wrapper; + const mockProvide = { + glFeatures: { + pipelineStatusForPipelineEditor: true, + }, + }; - const createComponent = () 
=> { + const createComponent = ({ provide = {} } = {}) => { wrapper = shallowMount(PipelineEditorHeader, { + provide: { + ...mockProvide, + ...provide, + }, props: { ciConfigData: mockLintResponse, isCiConfigDataLoading: false, @@ -16,6 +26,7 @@ describe('Pipeline editor header', () => { }); }; + const findPipelineStatus = () => wrapper.findComponent(PipelineStatus); const findValidationSegment = () => wrapper.findComponent(ValidationSegment); afterEach(() => { @@ -27,8 +38,27 @@ describe('Pipeline editor header', () => { beforeEach(() => { createComponent(); }); + + it('renders the pipeline status', () => { + expect(findPipelineStatus().exists()).toBe(true); + }); + it('renders the validation segment', () => { expect(findValidationSegment().exists()).toBe(true); }); }); + + describe('with pipeline status feature flag off', () => { + beforeEach(() => { + createComponent({ + provide: { + glFeatures: { pipelineStatusForPipelineEditor: false }, + }, + }); + }); + + it('does not render the pipeline status', () => { + expect(findPipelineStatus().exists()).toBe(false); + }); + }); }); diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js new file mode 100644 index 00000000000..de6e112866b --- /dev/null +++ b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js @@ -0,0 +1,150 @@ +import { GlIcon, GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui'; +import { shallowMount, createLocalVue } from '@vue/test-utils'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import PipelineStatus, { i18n } from '~/pipeline_editor/components/header/pipeline_status.vue'; +import CiIcon from '~/vue_shared/components/ci_icon.vue'; +import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data'; + +const localVue = createLocalVue(); 
+localVue.use(VueApollo); + +const mockProvide = { + projectFullPath: mockProjectFullPath, +}; + +describe('Pipeline Status', () => { + let wrapper; + let mockApollo; + let mockPipelineQuery; + + const createComponent = ({ hasPipeline = true, isQueryLoading = false }) => { + const pipeline = hasPipeline + ? { loading: isQueryLoading, ...mockProjectPipeline.pipeline } + : { loading: isQueryLoading }; + + wrapper = shallowMount(PipelineStatus, { + provide: mockProvide, + stubs: { GlLink, GlSprintf }, + data: () => (hasPipeline ? { pipeline } : {}), + mocks: { + $apollo: { + queries: { + pipeline, + }, + }, + }, + }); + }; + + const createComponentWithApollo = () => { + const resolvers = { + Query: { + project: mockPipelineQuery, + }, + }; + mockApollo = createMockApollo([], resolvers); + + wrapper = shallowMount(PipelineStatus, { + localVue, + apolloProvider: mockApollo, + provide: mockProvide, + stubs: { GlLink, GlSprintf }, + data() { + return { + commitSha: mockCommitSha, + }; + }, + }); + }; + + const findIcon = () => wrapper.findComponent(GlIcon); + const findCiIcon = () => wrapper.findComponent(CiIcon); + const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); + const findPipelineId = () => wrapper.find('[data-testid="pipeline-id"]'); + const findPipelineCommit = () => wrapper.find('[data-testid="pipeline-commit"]'); + const findPipelineErrorMsg = () => wrapper.find('[data-testid="pipeline-error-msg"]'); + const findPipelineLoadingMsg = () => wrapper.find('[data-testid="pipeline-loading-msg"]'); + + beforeEach(() => { + mockPipelineQuery = jest.fn(); + }); + + afterEach(() => { + mockPipelineQuery.mockReset(); + + wrapper.destroy(); + wrapper = null; + }); + + describe('while querying', () => { + it('renders loading icon', () => { + createComponent({ isQueryLoading: true, hasPipeline: false }); + + expect(findLoadingIcon().exists()).toBe(true); + expect(findPipelineLoadingMsg().text()).toBe(i18n.fetchLoading); + }); + + it('does not render loading 
icon if pipeline data is already set', () => { + createComponent({ isQueryLoading: true }); + + expect(findLoadingIcon().exists()).toBe(false); + }); + }); + + describe('when querying data', () => { + describe('when data is set', () => { + beforeEach(async () => { + mockPipelineQuery.mockResolvedValue(mockProjectPipeline); + + createComponentWithApollo(); + await waitForPromises(); + }); + + it('query is called with correct variables', async () => { + expect(mockPipelineQuery).toHaveBeenCalledTimes(1); + expect(mockPipelineQuery).toHaveBeenCalledWith( + expect.anything(), + { + fullPath: mockProjectFullPath, + }, + expect.anything(), + expect.anything(), + ); + }); + + it('does not render error', () => { + expect(findIcon().exists()).toBe(false); + }); + + it('renders pipeline data', () => { + const { id } = mockProjectPipeline.pipeline; + + expect(findCiIcon().exists()).toBe(true); + expect(findPipelineId().text()).toBe(`#${id.match(/\d+/g)[0]}`); + expect(findPipelineCommit().text()).toBe(mockCommitSha); + }); + }); + + describe('when data cannot be fetched', () => { + beforeEach(async () => { + mockPipelineQuery.mockRejectedValue(new Error()); + + createComponentWithApollo(); + await waitForPromises(); + }); + + it('renders error', () => { + expect(findIcon().attributes('name')).toBe('warning-solid'); + expect(findPipelineErrorMsg().text()).toBe(i18n.fetchError); + }); + + it('does not render pipeline data', () => { + expect(findCiIcon().exists()).toBe(false); + expect(findPipelineId().exists()).toBe(false); + expect(findPipelineCommit().exists()).toBe(false); + }); + }); + }); +}); diff --git a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js index d39c0d80296..196a4133eea 100644 --- a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js +++ b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js @@ -46,6 +46,24 @@ describe('~/pipeline_editor/graphql/resolvers', () => { await 
expect(result.rawData).resolves.toBe(mockCiYml); }); }); + + describe('pipeline', () => { + it('resolves pipeline data with type names', async () => { + const result = await resolvers.Query.project(null); + + // eslint-disable-next-line no-underscore-dangle + expect(result.__typename).toBe('Project'); + }); + + it('resolves pipeline data with necessary data', async () => { + const result = await resolvers.Query.project(null); + const pipelineKeys = Object.keys(result.pipeline); + const statusKeys = Object.keys(result.pipeline.detailedStatus); + + expect(pipelineKeys).toContain('id', 'commitPath', 'detailedStatus', 'shortSha'); + expect(statusKeys).toContain('detailsPath', 'text'); + }); + }); }); describe('Mutation', () => { diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js index 8e248c11b87..16d5ba0e714 100644 --- a/spec/frontend/pipeline_editor/mock_data.js +++ b/spec/frontend/pipeline_editor/mock_data.js @@ -138,6 +138,22 @@ export const mergeUnwrappedCiConfig = (mergedConfig) => { }; }; +export const mockProjectPipeline = { + pipeline: { + commitPath: '/-/commit/aabbccdd', + id: 'gid://gitlab/Ci::Pipeline/118', + iid: '28', + shortSha: mockCommitSha, + status: 'SUCCESS', + detailedStatus: { + detailsPath: '/root/sample-ci-project/-/pipelines/118"', + group: 'success', + icon: 'status_success', + text: 'passed', + }, + }, +}; + export const mockLintResponse = { valid: true, mergedYaml: mockCiYml, diff --git a/spec/frontend/pipelines/empty_state_spec.js b/spec/frontend/pipelines/empty_state_spec.js index 3ebedc9ac87..fdbb98dfd55 100644 --- a/spec/frontend/pipelines/empty_state_spec.js +++ b/spec/frontend/pipelines/empty_state_spec.js @@ -9,7 +9,6 @@ describe('Pipelines Empty State', () => { const createWrapper = () => { wrapper = shallowMount(EmptyState, { propsData: { - helpPagePath: 'foo', emptyStateSvgPath: 'foo', canSetCi: true, }, @@ -35,7 +34,7 @@ describe('Pipelines Empty State', () => { }); it('should 
render a link with provided help path', () => { - expect(findGetStartedButton().attributes('href')).toBe('foo'); + expect(findGetStartedButton().attributes('href')).toBe('/help/ci/quick_start/index.md'); }); it('should render empty state information', () => { diff --git a/spec/frontend/pipelines/graph_shared/links_inner_spec.js b/spec/frontend/pipelines/graph_shared/links_inner_spec.js index 6cabe2bc8a7..15a5fbfeffe 100644 --- a/spec/frontend/pipelines/graph_shared/links_inner_spec.js +++ b/spec/frontend/pipelines/graph_shared/links_inner_spec.js @@ -1,5 +1,15 @@ import { shallowMount } from '@vue/test-utils'; +import MockAdapter from 'axios-mock-adapter'; import { setHTMLFixture } from 'helpers/fixtures'; +import axios from '~/lib/utils/axios_utils'; +import { + PIPELINES_DETAIL_LINK_DURATION, + PIPELINES_DETAIL_LINKS_TOTAL, + PIPELINES_DETAIL_LINKS_JOB_RATIO, +} from '~/performance/constants'; +import * as perfUtils from '~/performance/utils'; +import * as sentryUtils from '~/pipelines/components/graph/utils'; +import * as Api from '~/pipelines/components/graph_shared/api'; import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue'; import { createJobsHash } from '~/pipelines/utils'; import { @@ -18,7 +28,9 @@ describe('Links Inner component', () => { containerMeasurements: { width: 1019, height: 445 }, pipelineId: 1, pipelineData: [], + totalGroups: 10, }; + let wrapper; const createComponent = (props) => { @@ -194,4 +206,141 @@ describe('Links Inner component', () => { expect(firstLink.classes(hoverColorClass)).toBe(true); }); }); + + describe('performance metrics', () => { + let markAndMeasure; + let reportToSentry; + let reportPerformance; + let mock; + + beforeEach(() => { + mock = new MockAdapter(axios); + jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb()); + markAndMeasure = jest.spyOn(perfUtils, 'performanceMarkAndMeasure'); + reportToSentry = jest.spyOn(sentryUtils, 'reportToSentry'); + reportPerformance = 
jest.spyOn(Api, 'reportPerformance'); + }); + + afterEach(() => { + mock.restore(); + }); + + describe('with no metrics config object', () => { + beforeEach(() => { + setFixtures(pipelineData); + createComponent({ + pipelineData: pipelineData.stages, + }); + }); + + it('is not called', () => { + expect(markAndMeasure).not.toHaveBeenCalled(); + expect(reportToSentry).not.toHaveBeenCalled(); + expect(reportPerformance).not.toHaveBeenCalled(); + }); + }); + + describe('with metrics config set to false', () => { + beforeEach(() => { + setFixtures(pipelineData); + createComponent({ + pipelineData: pipelineData.stages, + metricsConfig: { + collectMetrics: false, + metricsPath: '/path/to/metrics', + }, + }); + }); + + it('is not called', () => { + expect(markAndMeasure).not.toHaveBeenCalled(); + expect(reportToSentry).not.toHaveBeenCalled(); + expect(reportPerformance).not.toHaveBeenCalled(); + }); + }); + + describe('with no metrics path', () => { + beforeEach(() => { + setFixtures(pipelineData); + createComponent({ + pipelineData: pipelineData.stages, + metricsConfig: { + collectMetrics: true, + metricsPath: '', + }, + }); + }); + + it('is not called', () => { + expect(markAndMeasure).not.toHaveBeenCalled(); + expect(reportToSentry).not.toHaveBeenCalled(); + expect(reportPerformance).not.toHaveBeenCalled(); + }); + }); + + describe('with metrics path and collect set to true', () => { + const metricsPath = '/root/project/-/ci/prometheus_metrics/histograms.json'; + const duration = 0.0478; + const numLinks = 1; + const metricsData = { + histograms: [ + { name: PIPELINES_DETAIL_LINK_DURATION, value: duration }, + { name: PIPELINES_DETAIL_LINKS_TOTAL, value: numLinks }, + { + name: PIPELINES_DETAIL_LINKS_JOB_RATIO, + value: numLinks / defaultProps.totalGroups, + }, + ], + }; + + describe('when no duration is obtained', () => { + beforeEach(() => { + jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => { + return []; + }); + + 
setFixtures(pipelineData); + + createComponent({ + pipelineData: pipelineData.stages, + metricsConfig: { + collectMetrics: true, + path: metricsPath, + }, + }); + }); + + it('attempts to collect metrics', () => { + expect(markAndMeasure).toHaveBeenCalled(); + expect(reportPerformance).not.toHaveBeenCalled(); + expect(reportToSentry).not.toHaveBeenCalled(); + }); + }); + + describe('with duration and no error', () => { + beforeEach(() => { + jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => { + return [{ duration }]; + }); + + setFixtures(pipelineData); + + createComponent({ + pipelineData: pipelineData.stages, + metricsConfig: { + collectMetrics: true, + path: metricsPath, + }, + }); + }); + + it('calls reportPerformance with expected arguments', () => { + expect(markAndMeasure).toHaveBeenCalled(); + expect(reportPerformance).toHaveBeenCalled(); + expect(reportPerformance).toHaveBeenCalledWith(metricsPath, metricsData); + expect(reportToSentry).not.toHaveBeenCalled(); + }); + }); + }); + }); }); diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js index 2afdbb05107..337838c41b3 100644 --- a/spec/frontend/pipelines/mock_data.js +++ b/spec/frontend/pipelines/mock_data.js @@ -2,328 +2,6 @@ const PIPELINE_RUNNING = 'RUNNING'; const PIPELINE_CANCELED = 'CANCELED'; const PIPELINE_FAILED = 'FAILED'; -export const pipelineWithStages = { - id: 20333396, - user: { - id: 128633, - name: 'Rémy Coutable', - username: 'rymai', - state: 'active', - avatar_url: - 'https://secure.gravatar.com/avatar/263da227929cc0035cb0eba512bcf81a?s=80\u0026d=identicon', - web_url: 'https://gitlab.com/rymai', - path: '/rymai', - }, - active: true, - coverage: '58.24', - source: 'push', - created_at: '2018-04-11T14:04:53.881Z', - updated_at: '2018-04-11T14:05:00.792Z', - path: '/gitlab-org/gitlab/pipelines/20333396', - flags: { - latest: true, - stuck: false, - auto_devops: false, - yaml_errors: false, - retryable: false, - 
cancelable: true, - failure_reason: false, - }, - details: { - status: { - icon: 'status_running', - text: 'running', - label: 'running', - group: 'running', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_running-2eb56be2871937954b2ba6d6f4ee9fdf7e5e1c146ac45f7be98119ccaca1aca9.ico', - }, - duration: null, - finished_at: null, - stages: [ - { - name: 'build', - title: 'build: skipped', - status: { - icon: 'status_skipped', - text: 'skipped', - label: 'skipped', - group: 'skipped', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396#build', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_skipped-a2eee568a5bffdb494050c7b62dde241de9189280836288ac8923d369f16222d.ico', - }, - path: '/gitlab-org/gitlab/pipelines/20333396#build', - dropdown_path: '/gitlab-org/gitlab/pipelines/20333396/stage.json?stage=build', - }, - { - name: 'prepare', - title: 'prepare: passed', - status: { - icon: 'status_success', - text: 'passed', - label: 'passed', - group: 'success', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396#prepare', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_success-26f59841becbef8c6fe414e9e74471d8bfd6a91b5855c19fe7f5923a40a7da47.ico', - }, - path: '/gitlab-org/gitlab/pipelines/20333396#prepare', - dropdown_path: '/gitlab-org/gitlab/pipelines/20333396/stage.json?stage=prepare', - }, - { - name: 'test', - title: 'test: running', - status: { - icon: 'status_running', - text: 'running', - label: 'running', - group: 'running', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396#test', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_running-2eb56be2871937954b2ba6d6f4ee9fdf7e5e1c146ac45f7be98119ccaca1aca9.ico', - }, - path: '/gitlab-org/gitlab/pipelines/20333396#test', - dropdown_path: 
'/gitlab-org/gitlab/pipelines/20333396/stage.json?stage=test', - }, - { - name: 'post-test', - title: 'post-test: created', - status: { - icon: 'status_created', - text: 'created', - label: 'created', - group: 'created', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396#post-test', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_created-e997aa0b7db73165df8a9d6803932b18d7b7cc37d604d2d96e378fea2dba9c5f.ico', - }, - path: '/gitlab-org/gitlab/pipelines/20333396#post-test', - dropdown_path: '/gitlab-org/gitlab/pipelines/20333396/stage.json?stage=post-test', - }, - { - name: 'pages', - title: 'pages: created', - status: { - icon: 'status_created', - text: 'created', - label: 'created', - group: 'created', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396#pages', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_created-e997aa0b7db73165df8a9d6803932b18d7b7cc37d604d2d96e378fea2dba9c5f.ico', - }, - path: '/gitlab-org/gitlab/pipelines/20333396#pages', - dropdown_path: '/gitlab-org/gitlab/pipelines/20333396/stage.json?stage=pages', - }, - { - name: 'post-cleanup', - title: 'post-cleanup: created', - status: { - icon: 'status_created', - text: 'created', - label: 'created', - group: 'created', - has_details: true, - details_path: '/gitlab-org/gitlab/pipelines/20333396#post-cleanup', - favicon: - 'https://assets.gitlab-static.net/assets/ci_favicons/favicon_status_created-e997aa0b7db73165df8a9d6803932b18d7b7cc37d604d2d96e378fea2dba9c5f.ico', - }, - path: '/gitlab-org/gitlab/pipelines/20333396#post-cleanup', - dropdown_path: '/gitlab-org/gitlab/pipelines/20333396/stage.json?stage=post-cleanup', - }, - ], - artifacts: [ - { - name: 'gitlab:assets:compile', - expired: false, - expire_at: '2018-05-12T14:22:54.730Z', - path: '/gitlab-org/gitlab/-/jobs/62411438/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411438/artifacts/keep', - browse_path: 
'/gitlab-org/gitlab/-/jobs/62411438/artifacts/browse', - }, - { - name: 'rspec-mysql 12 28', - expired: false, - expire_at: '2018-05-12T14:22:45.136Z', - path: '/gitlab-org/gitlab/-/jobs/62411397/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411397/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411397/artifacts/browse', - }, - { - name: 'rspec-mysql 6 28', - expired: false, - expire_at: '2018-05-12T14:22:41.523Z', - path: '/gitlab-org/gitlab/-/jobs/62411391/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411391/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411391/artifacts/browse', - }, - { - name: 'rspec-pg geo 0 1', - expired: false, - expire_at: '2018-05-12T14:22:13.287Z', - path: '/gitlab-org/gitlab/-/jobs/62411353/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411353/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411353/artifacts/browse', - }, - { - name: 'rspec-mysql 0 28', - expired: false, - expire_at: '2018-05-12T14:22:06.834Z', - path: '/gitlab-org/gitlab/-/jobs/62411385/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411385/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411385/artifacts/browse', - }, - { - name: 'spinach-mysql 0 2', - expired: false, - expire_at: '2018-05-12T14:21:51.409Z', - path: '/gitlab-org/gitlab/-/jobs/62411423/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411423/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411423/artifacts/browse', - }, - { - name: 'karma', - expired: false, - expire_at: '2018-05-12T14:21:20.934Z', - path: '/gitlab-org/gitlab/-/jobs/62411440/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411440/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411440/artifacts/browse', - }, - { - name: 'spinach-pg 0 2', - expired: false, - expire_at: '2018-05-12T14:20:01.028Z', - path: '/gitlab-org/gitlab/-/jobs/62411419/artifacts/download', - keep_path: 
'/gitlab-org/gitlab/-/jobs/62411419/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411419/artifacts/browse', - }, - { - name: 'spinach-pg 1 2', - expired: false, - expire_at: '2018-05-12T14:19:04.336Z', - path: '/gitlab-org/gitlab/-/jobs/62411421/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411421/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411421/artifacts/browse', - }, - { - name: 'sast', - expired: null, - expire_at: null, - path: '/gitlab-org/gitlab/-/jobs/62411442/artifacts/download', - browse_path: '/gitlab-org/gitlab/-/jobs/62411442/artifacts/browse', - }, - { - name: 'code_quality', - expired: false, - expire_at: '2018-04-18T14:16:24.484Z', - path: '/gitlab-org/gitlab/-/jobs/62411441/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411441/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411441/artifacts/browse', - }, - { - name: 'cache gems', - expired: null, - expire_at: null, - path: '/gitlab-org/gitlab/-/jobs/62411447/artifacts/download', - browse_path: '/gitlab-org/gitlab/-/jobs/62411447/artifacts/browse', - }, - { - name: 'dependency_scanning', - expired: null, - expire_at: null, - path: '/gitlab-org/gitlab/-/jobs/62411443/artifacts/download', - browse_path: '/gitlab-org/gitlab/-/jobs/62411443/artifacts/browse', - }, - { - name: 'compile-assets', - expired: false, - expire_at: '2018-04-18T14:12:07.638Z', - path: '/gitlab-org/gitlab/-/jobs/62411334/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411334/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411334/artifacts/browse', - }, - { - name: 'setup-test-env', - expired: false, - expire_at: '2018-04-18T14:10:27.024Z', - path: '/gitlab-org/gitlab/-/jobs/62411336/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411336/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411336/artifacts/browse', - }, - { - name: 'retrieve-tests-metadata', - expired: false, - expire_at: 
'2018-05-12T14:06:35.926Z', - path: '/gitlab-org/gitlab/-/jobs/62411333/artifacts/download', - keep_path: '/gitlab-org/gitlab/-/jobs/62411333/artifacts/keep', - browse_path: '/gitlab-org/gitlab/-/jobs/62411333/artifacts/browse', - }, - ], - manual_actions: [ - { - name: 'package-and-qa', - path: '/gitlab-org/gitlab/-/jobs/62411330/play', - playable: true, - }, - { - name: 'review-docs-deploy', - path: '/gitlab-org/gitlab/-/jobs/62411332/play', - playable: true, - }, - ], - }, - ref: { - name: 'master', - path: '/gitlab-org/gitlab/commits/master', - tag: false, - branch: true, - }, - commit: { - id: 'e6a2885c503825792cb8a84a8731295e361bd059', - short_id: 'e6a2885c', - title: "Merge branch 'ce-to-ee-2018-04-11' into 'master'", - created_at: '2018-04-11T14:04:39.000Z', - parent_ids: [ - '5d9b5118f6055f72cff1a82b88133609912f2c1d', - '6fdc6ee76a8062fe41b1a33f7c503334a6ebdc02', - ], - message: - "Merge branch 'ce-to-ee-2018-04-11' into 'master'\n\nCE upstream - 2018-04-11 12:26 UTC\n\nSee merge request gitlab-org/gitlab-ee!5326", - author_name: 'Rémy Coutable', - author_email: 'remy@rymai.me', - authored_date: '2018-04-11T14:04:39.000Z', - committer_name: 'Rémy Coutable', - committer_email: 'remy@rymai.me', - committed_date: '2018-04-11T14:04:39.000Z', - author: { - id: 128633, - name: 'Rémy Coutable', - username: 'rymai', - state: 'active', - avatar_url: - 'https://secure.gravatar.com/avatar/263da227929cc0035cb0eba512bcf81a?s=80\u0026d=identicon', - web_url: 'https://gitlab.com/rymai', - path: '/rymai', - }, - author_gravatar_url: - 'https://secure.gravatar.com/avatar/263da227929cc0035cb0eba512bcf81a?s=80\u0026d=identicon', - commit_url: - 'https://gitlab.com/gitlab-org/gitlab/commit/e6a2885c503825792cb8a84a8731295e361bd059', - commit_path: '/gitlab-org/gitlab/commit/e6a2885c503825792cb8a84a8731295e361bd059', - }, - cancel_path: '/gitlab-org/gitlab/pipelines/20333396/cancel', - triggered_by: null, - triggered: [], -}; - const threeWeeksAgo = new Date(); 
threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21); diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js index 44c9def99cc..4997e9cf3ec 100644 --- a/spec/frontend/pipelines/pipeline_url_spec.js +++ b/spec/frontend/pipelines/pipeline_url_spec.js @@ -1,9 +1,8 @@ import { shallowMount } from '@vue/test-utils'; -import $ from 'jquery'; import { trimText } from 'helpers/text_helper'; import PipelineUrlComponent from '~/pipelines/components/pipelines_list/pipeline_url.vue'; -$.fn.popover = () => {}; +const projectPath = 'test/test'; describe('Pipeline Url Component', () => { let wrapper; @@ -14,6 +13,7 @@ describe('Pipeline Url Component', () => { const findYamlTag = () => wrapper.find('[data-testid="pipeline-url-yaml"]'); const findFailureTag = () => wrapper.find('[data-testid="pipeline-url-failure"]'); const findAutoDevopsTag = () => wrapper.find('[data-testid="pipeline-url-autodevops"]'); + const findAutoDevopsTagLink = () => wrapper.find('[data-testid="pipeline-url-autodevops-link"]'); const findStuckTag = () => wrapper.find('[data-testid="pipeline-url-stuck"]'); const findDetachedTag = () => wrapper.find('[data-testid="pipeline-url-detached"]'); const findForkTag = () => wrapper.find('[data-testid="pipeline-url-fork"]'); @@ -23,9 +23,9 @@ describe('Pipeline Url Component', () => { pipeline: { id: 1, path: 'foo', + project: { full_path: `/${projectPath}` }, flags: {}, }, - autoDevopsHelpPath: 'foo', pipelineScheduleUrl: 'foo', }; @@ -33,7 +33,7 @@ describe('Pipeline Url Component', () => { wrapper = shallowMount(PipelineUrlComponent, { propsData: { ...defaultProps, ...props }, provide: { - targetProjectFullPath: 'test/test', + targetProjectFullPath: projectPath, }, }); }; @@ -57,6 +57,19 @@ describe('Pipeline Url Component', () => { expect(findPipelineUrlLink().text()).toBe('#1'); }); + it('should not render tags when flags are not set', () => { + createComponent(); + + expect(findStuckTag().exists()).toBe(false); 
+ expect(findLatestTag().exists()).toBe(false); + expect(findYamlTag().exists()).toBe(false); + expect(findAutoDevopsTag().exists()).toBe(false); + expect(findFailureTag().exists()).toBe(false); + expect(findScheduledTag().exists()).toBe(false); + expect(findForkTag().exists()).toBe(false); + expect(findTrainTag().exists()).toBe(false); + }); + it('should render the stuck tag when flag is provided', () => { createComponent({ pipeline: { @@ -96,6 +109,7 @@ describe('Pipeline Url Component', () => { it('should render an autodevops badge when flag is provided', () => { createComponent({ pipeline: { + ...defaultProps.pipeline, flags: { auto_devops: true, }, @@ -103,6 +117,11 @@ describe('Pipeline Url Component', () => { }); expect(trimText(findAutoDevopsTag().text())).toBe('Auto DevOps'); + + expect(findAutoDevopsTagLink().attributes()).toMatchObject({ + href: '/help/topics/autodevops/index.md', + target: '_blank', + }); }); it('should render a detached badge when flag is provided', () => { @@ -147,7 +166,7 @@ describe('Pipeline Url Component', () => { createComponent({ pipeline: { flags: {}, - project: { fullPath: 'test/forked' }, + project: { fullPath: '/test/forked' }, }, }); diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js index 811303a5624..ccf3f0b6667 100644 --- a/spec/frontend/pipelines/pipelines_spec.js +++ b/spec/frontend/pipelines/pipelines_spec.js @@ -18,7 +18,7 @@ import Store from '~/pipelines/stores/pipelines_store'; import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue'; import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue'; -import { pipelineWithStages, stageReply, users, mockSearch, branches } from './mock_data'; +import { stageReply, users, mockSearch, branches } from './mock_data'; jest.mock('~/flash'); @@ -27,6 +27,9 @@ const mockProjectId = '21'; const mockPipelinesEndpoint = `/${mockProjectPath}/pipelines.json`; const mockPipelinesResponse = 
getJSONFixture('pipelines/pipelines.json'); const mockPipelinesIds = mockPipelinesResponse.pipelines.map(({ id }) => id); +const mockPipelineWithStages = mockPipelinesResponse.pipelines.find( + (p) => p.details.stages && p.details.stages.length, +); describe('Pipelines', () => { let wrapper; @@ -34,8 +37,6 @@ describe('Pipelines', () => { let origWindowLocation; const paths = { - autoDevopsHelpPath: '/help/topics/autodevops/index.md', - helpPagePath: '/help/ci/quick_start/README', emptyStateSvgPath: '/assets/illustrations/pipelines_empty.svg', errorStateSvgPath: '/assets/illustrations/pipelines_failed.svg', noPipelinesSvgPath: '/assets/illustrations/pipelines_pending.svg', @@ -45,8 +46,6 @@ describe('Pipelines', () => { }; const noPermissions = { - autoDevopsHelpPath: '/help/topics/autodevops/index.md', - helpPagePath: '/help/ci/quick_start/README', emptyStateSvgPath: '/assets/illustrations/pipelines_empty.svg', errorStateSvgPath: '/assets/illustrations/pipelines_failed.svg', noPipelinesSvgPath: '/assets/illustrations/pipelines_pending.svg', @@ -546,7 +545,9 @@ describe('Pipelines', () => { 'GitLab CI/CD can automatically build, test, and deploy your code.', ); expect(findEmptyState().find(GlButton).text()).toBe('Get started with CI/CD'); - expect(findEmptyState().find(GlButton).attributes('href')).toBe(paths.helpPagePath); + expect(findEmptyState().find(GlButton).attributes('href')).toBe( + '/help/ci/quick_start/index.md', + ); }); it('does not render tabs nor buttons', () => { @@ -613,14 +614,15 @@ describe('Pipelines', () => { mock.onGet(mockPipelinesEndpoint, { scope: 'all', page: '1' }).reply( 200, { - pipelines: [pipelineWithStages], + pipelines: [mockPipelineWithStages], count: { all: '1' }, }, { 'POLL-INTERVAL': 100, }, ); - mock.onGet(pipelineWithStages.details.stages[0].dropdown_path).reply(200, stageReply); + + mock.onGet(mockPipelineWithStages.details.stages[0].dropdown_path).reply(200, stageReply); createComponent(); diff --git 
a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js index 660651547fc..8edf891b443 100644 --- a/spec/frontend/pipelines/pipelines_table_row_spec.js +++ b/spec/frontend/pipelines/pipelines_table_row_spec.js @@ -9,7 +9,6 @@ describe('Pipelines Table Row', () => { mount(PipelinesTableRowComponent, { propsData: { pipeline, - autoDevopsHelpPath: 'foo', viewType: 'root', }, }); @@ -156,7 +155,7 @@ describe('Pipelines Table Row', () => { it('should render an icon for each stage', () => { expect( wrapper.findAll( - '.table-section:nth-child(4) [data-testid="mini-pipeline-graph-dropdown-toggle"]', + '.table-section:nth-child(5) [data-testid="mini-pipeline-graph-dropdown-toggle"]', ).length, ).toEqual(pipeline.details.stages.length); }); @@ -183,9 +182,10 @@ describe('Pipelines Table Row', () => { expect(wrapper.find('.js-pipelines-retry-button').attributes('title')).toMatch('Retry'); expect(wrapper.find('.js-pipelines-cancel-button').exists()).toBe(true); expect(wrapper.find('.js-pipelines-cancel-button').attributes('title')).toMatch('Cancel'); - const dropdownMenu = wrapper.find('.dropdown-menu'); - expect(dropdownMenu.text()).toContain(scheduledJobAction.name); + const actionsMenu = wrapper.find('[data-testid="pipelines-manual-actions-dropdown"]'); + + expect(actionsMenu.text()).toContain(scheduledJobAction.name); }); it('emits `retryPipeline` event when retry button is clicked and toggles loading', () => { diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js index fd73d507919..64febd7311c 100644 --- a/spec/frontend/pipelines/pipelines_table_spec.js +++ b/spec/frontend/pipelines/pipelines_table_spec.js @@ -9,7 +9,6 @@ describe('Pipelines Table', () => { const defaultProps = { pipelines: [], - autoDevopsHelpPath: 'foo', viewType: 'root', }; @@ -58,7 +57,7 @@ describe('Pipelines Table', () => { describe('with data', () => { it('should render rows', () => { - 
createComponent({ pipelines: [pipeline], autoDevopsHelpPath: 'foo', viewType: 'root' }); + createComponent({ pipelines: [pipeline], viewType: 'root' }); expect(findRows()).toHaveLength(1); }); diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js index f9fbb1b3016..8acf2376860 100644 --- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js +++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js @@ -154,7 +154,7 @@ describe('ServiceDeskRoot', () => { }); it('shows an error message', () => { - expect(getAlertText()).toContain('An error occured while saving changes:'); + expect(getAlertText()).toContain('An error occurred while saving changes:'); }); }); }); diff --git a/spec/frontend/projects/upload_file_experiment_spec.js b/spec/frontend/projects/upload_file_experiment_spec.js new file mode 100644 index 00000000000..57abce779a5 --- /dev/null +++ b/spec/frontend/projects/upload_file_experiment_spec.js @@ -0,0 +1,46 @@ +import ExperimentTracking from '~/experiment_tracking'; +import * as UploadFileExperiment from '~/projects/upload_file_experiment'; + +const mockExperimentTrackingEvent = jest.fn(); +jest.mock('~/experiment_tracking', () => + jest.fn().mockImplementation(() => ({ + event: mockExperimentTrackingEvent, + })), +); + +const fixture = `<a class='js-upload-file-experiment-trigger' data-toggle='modal' data-target='#modal-upload-blob'></a><div id='modal-upload-blob'></div>`; +const findModal = () => document.querySelector('[aria-modal="true"]'); +const findTrigger = () => document.querySelector('.js-upload-file-experiment-trigger'); + +beforeEach(() => { + ExperimentTracking.mockClear(); + mockExperimentTrackingEvent.mockClear(); + + document.body.innerHTML = fixture; +}); + +afterEach(() => { + document.body.innerHTML = ''; +}); + +describe('trackUploadFileFormSubmitted', () 
=> { + it('initializes ExperimentTracking with the correct arguments and calls the tracking event with correct arguments', () => { + UploadFileExperiment.trackUploadFileFormSubmitted(); + + expect(ExperimentTracking).toHaveBeenCalledWith('empty_repo_upload', { + label: 'blob-upload-modal', + }); + expect(mockExperimentTrackingEvent).toHaveBeenCalledWith('click_upload_modal_form_submit'); + }); +}); + +describe('initUploadFileTrigger', () => { + it('calls modal and tracks event', () => { + UploadFileExperiment.initUploadFileTrigger(); + + expect(findModal()).not.toExist(); + findTrigger().click(); + expect(findModal()).toExist(); + expect(mockExperimentTrackingEvent).toHaveBeenCalledWith('click_upload_modal_trigger'); + }); +}); diff --git a/spec/frontend/security_configuration/configuration_table_spec.js b/spec/frontend/security_configuration/configuration_table_spec.js index 49f9a7a3ea8..a9d9a0dbf1a 100644 --- a/spec/frontend/security_configuration/configuration_table_spec.js +++ b/spec/frontend/security_configuration/configuration_table_spec.js @@ -5,11 +5,7 @@ import { features, UPGRADE_CTA } from '~/security_configuration/components/featu import { REPORT_TYPE_SAST, - REPORT_TYPE_DAST, - REPORT_TYPE_DEPENDENCY_SCANNING, - REPORT_TYPE_CONTAINER_SCANNING, - REPORT_TYPE_COVERAGE_FUZZING, - REPORT_TYPE_LICENSE_COMPLIANCE, + REPORT_TYPE_SECRET_DETECTION, } from '~/vue_shared/security_reports/constants'; describe('Configuration Table Component', () => { @@ -19,6 +15,8 @@ describe('Configuration Table Component', () => { wrapper = extendedWrapper(mount(ConfigurationTable, {})); }; + const findHelpLinks = () => wrapper.findAll('[data-testid="help-link"]'); + afterEach(() => { wrapper.destroy(); }); @@ -27,22 +25,20 @@ describe('Configuration Table Component', () => { createComponent(); }); - it.each(features)('should match strings', (feature) => { - expect(wrapper.text()).toContain(feature.name); - expect(wrapper.text()).toContain(feature.description); - - if 
(feature.type === REPORT_TYPE_SAST) { - expect(wrapper.findByTestId(feature.type).text()).toBe('Configure via Merge Request'); - } else if ( - [ - REPORT_TYPE_DAST, - REPORT_TYPE_DEPENDENCY_SCANNING, - REPORT_TYPE_CONTAINER_SCANNING, - REPORT_TYPE_COVERAGE_FUZZING, - REPORT_TYPE_LICENSE_COMPLIANCE, - ].includes(feature.type) - ) { - expect(wrapper.findByTestId(feature.type).text()).toMatchInterpolatedText(UPGRADE_CTA); - } + describe.each(features.map((feature, i) => [feature, i]))('given feature %s', (feature, i) => { + it('should match strings', () => { + expect(wrapper.text()).toContain(feature.name); + expect(wrapper.text()).toContain(feature.description); + if (feature.type === REPORT_TYPE_SAST) { + expect(wrapper.findByTestId(feature.type).text()).toBe('Configure via Merge Request'); + } else if (feature.type !== REPORT_TYPE_SECRET_DETECTION) { + expect(wrapper.findByTestId(feature.type).text()).toMatchInterpolatedText(UPGRADE_CTA); + } + }); + + it('should show expected help link', () => { + const helpLink = findHelpLinks().at(i); + expect(helpLink.attributes('href')).toBe(feature.helpPath); + }); }); }); diff --git a/spec/frontend/sentry/sentry_config_spec.js b/spec/frontend/sentry/sentry_config_spec.js index f7102f9b2f9..1f5097ef2a8 100644 --- a/spec/frontend/sentry/sentry_config_spec.js +++ b/spec/frontend/sentry/sentry_config_spec.js @@ -1,5 +1,5 @@ +import * as Sentry from '@sentry/browser'; import SentryConfig from '~/sentry/sentry_config'; -import * as Sentry from '~/sentry/wrapper'; describe('SentryConfig', () => { describe('IGNORE_ERRORS', () => { diff --git a/spec/frontend/sidebar/subscriptions_spec.js b/spec/frontend/sidebar/subscriptions_spec.js index e7ae59e26cf..6ab8e1e0ebc 100644 --- a/spec/frontend/sidebar/subscriptions_spec.js +++ b/spec/frontend/sidebar/subscriptions_spec.js @@ -84,6 +84,15 @@ describe('Subscriptions', () => { spy.mockRestore(); }); + it('has visually hidden label', () => { + wrapper = mountComponent(); + + 
expect(findToggleButton().props()).toMatchObject({ + label: 'Notifications', + labelPosition: 'hidden', + }); + }); + describe('given project emails are disabled', () => { const subscribeDisabledDescription = 'Notifications have been disabled'; diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js index a516a4a8269..be86e8ed90f 100644 --- a/spec/frontend/tracking_spec.js +++ b/spec/frontend/tracking_spec.js @@ -1,5 +1,5 @@ import { setHTMLFixture } from 'helpers/fixtures'; -import Tracking, { initUserTracking, initDefaultTrackers } from '~/tracking'; +import Tracking, { initUserTracking, initDefaultTrackers, STANDARD_CONTEXT } from '~/tracking'; describe('Tracking', () => { let snowplowSpy; @@ -45,7 +45,7 @@ describe('Tracking', () => { it('should activate features based on what has been enabled', () => { initDefaultTrackers(); expect(snowplowSpy).toHaveBeenCalledWith('enableActivityTracking', 30, 30); - expect(snowplowSpy).toHaveBeenCalledWith('trackPageView'); + expect(snowplowSpy).toHaveBeenCalledWith('trackPageView', null, [STANDARD_CONTEXT]); expect(snowplowSpy).not.toHaveBeenCalledWith('enableFormTracking'); expect(snowplowSpy).not.toHaveBeenCalledWith('enableLinkClickTracking'); @@ -88,7 +88,7 @@ describe('Tracking', () => { '_label_', undefined, undefined, - undefined, + [STANDARD_CONTEXT], ); }); diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js index 7c9c3d69efa..5c6053f413f 100644 --- a/spec/frontend/user_popovers_spec.js +++ b/spec/frontend/user_popovers_spec.js @@ -6,6 +6,19 @@ describe('User Popovers', () => { preloadFixtures(fixtureTemplate); const selector = '.js-user-link, .gfm-project_member'; + const findFixtureLinks = () => { + return Array.from(document.querySelectorAll(selector)).filter( + ({ dataset }) => dataset.user || dataset.userId, + ); + }; + const createUserLink = () => { + const link = document.createElement('a'); + + link.classList.add('js-user-link'); + 
link.setAttribute('data-user', '1'); + + return link; + }; const dummyUser = { name: 'root' }; const dummyUserStatus = { message: 'active' }; @@ -37,13 +50,20 @@ describe('User Popovers', () => { }); it('initializes a popover for each user link with a user id', () => { - const linksWithUsers = Array.from(document.querySelectorAll(selector)).filter( - ({ dataset }) => dataset.user || dataset.userId, - ); + const linksWithUsers = findFixtureLinks(); expect(linksWithUsers.length).toBe(popovers.length); }); + it('adds popovers to user links added to the DOM tree after the initial call', async () => { + document.body.appendChild(createUserLink()); + document.body.appendChild(createUserLink()); + + const linksWithUsers = findFixtureLinks(); + + expect(linksWithUsers.length).toBe(popovers.length + 2); + }); + it('does not initialize the user popovers twice for the same element', () => { const newPopovers = initUserPopovers(document.querySelectorAll(selector)); const samePopovers = popovers.every((popover, index) => newPopovers[index] === popover); diff --git a/spec/frontend/vue_shared/alert_details/alert_details_spec.js b/spec/frontend/vue_shared/alert_details/alert_details_spec.js index dd9a7be6268..ce410a8b3e7 100644 --- a/spec/frontend/vue_shared/alert_details/alert_details_spec.js +++ b/spec/frontend/vue_shared/alert_details/alert_details_spec.js @@ -128,6 +128,10 @@ describe('AlertDetails', () => { expect(wrapper.findByTestId('startTimeItem').exists()).toBe(true); expect(wrapper.findByTestId('startTimeItem').props('time')).toBe(mockAlert.startedAt); }); + + it('renders the metrics tab', () => { + expect(findMetricsTab().exists()).toBe(true); + }); }); describe('individual alert fields', () => { @@ -179,7 +183,8 @@ describe('AlertDetails', () => { describe('Threat Monitoring details', () => { it('should not render the metrics tab', () => { mountComponent({ - data: { alert: mockAlert, provide: { isThreatMonitoringPage: true } }, + data: { alert: mockAlert }, + provide: 
{ isThreatMonitoringPage: true }, }); expect(findMetricsTab().exists()).toBe(false); }); diff --git a/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap b/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap index 51b8aa162bc..ed085fb66dc 100644 --- a/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap +++ b/spec/frontend/vue_shared/components/settings/__snapshots__/settings_block_spec.js.snap @@ -2,7 +2,7 @@ exports[`Settings Block renders the correct markup 1`] = ` <section - class="settings no-animate" + class="settings" > <div class="settings-header" diff --git a/spec/frontend/vue_shared/components/settings/settings_block_spec.js b/spec/frontend/vue_shared/components/settings/settings_block_spec.js index 2db0b001b5b..be5a15631eb 100644 --- a/spec/frontend/vue_shared/components/settings/settings_block_spec.js +++ b/spec/frontend/vue_shared/components/settings/settings_block_spec.js @@ -50,6 +50,27 @@ describe('Settings Block', () => { expect(findDescriptionSlot().exists()).toBe(true); }); + describe('slide animation behaviour', () => { + it('is animated by default', () => { + mountComponent(); + + expect(wrapper.classes('no-animate')).toBe(false); + }); + + it.each` + slideAnimated | noAnimatedClass + ${true} | ${false} + ${false} | ${true} + `( + 'sets the correct state when slideAnimated is $slideAnimated', + ({ slideAnimated, noAnimatedClass }) => { + mountComponent({ slideAnimated }); + + expect(wrapper.classes('no-animate')).toBe(noAnimatedClass); + }, + ); + }); + describe('expanded behaviour', () => { it('is collapsed by default', () => { mountComponent(); diff --git a/spec/frontend/vue_shared/components/tabs/tab_spec.js b/spec/frontend/vue_shared/components/tabs/tab_spec.js deleted file mode 100644 index ee0c983c764..00000000000 --- a/spec/frontend/vue_shared/components/tabs/tab_spec.js +++ /dev/null @@ -1,32 +0,0 @@ -import Vue from 'vue'; -import 
mountComponent from 'helpers/vue_mount_component_helper'; -import Tab from '~/vue_shared/components/tabs/tab.vue'; - -describe('Tab component', () => { - const Component = Vue.extend(Tab); - let vm; - - beforeEach(() => { - vm = mountComponent(Component); - }); - - it('sets localActive to equal active', (done) => { - vm.active = true; - - vm.$nextTick(() => { - expect(vm.localActive).toBe(true); - - done(); - }); - }); - - it('sets active class', (done) => { - vm.active = true; - - vm.$nextTick(() => { - expect(vm.$el.classList).toContain('active'); - - done(); - }); - }); -}); diff --git a/spec/frontend/vue_shared/components/tabs/tabs_spec.js b/spec/frontend/vue_shared/components/tabs/tabs_spec.js deleted file mode 100644 index fe7be5be899..00000000000 --- a/spec/frontend/vue_shared/components/tabs/tabs_spec.js +++ /dev/null @@ -1,61 +0,0 @@ -import Vue from 'vue'; -import Tab from '~/vue_shared/components/tabs/tab.vue'; -import Tabs from '~/vue_shared/components/tabs/tabs'; - -describe('Tabs component', () => { - let vm; - - beforeEach(() => { - vm = new Vue({ - components: { - Tabs, - Tab, - }, - render(h) { - return h('div', [ - h('tabs', [ - h('tab', { attrs: { title: 'Testing', active: true } }, 'First tab'), - h('tab', [h('template', { slot: 'title' }, 'Test slot'), 'Second tab']), - ]), - ]); - }, - }).$mount(); - - return vm.$nextTick(); - }); - - describe('tab links', () => { - it('renders links for tabs', () => { - expect(vm.$el.querySelectorAll('a').length).toBe(2); - }); - - it('renders link titles from props', () => { - expect(vm.$el.querySelector('a').textContent).toContain('Testing'); - }); - - it('renders link titles from slot', () => { - expect(vm.$el.querySelectorAll('a')[1].textContent).toContain('Test slot'); - }); - - it('renders active class', () => { - expect(vm.$el.querySelector('a').classList).toContain('active'); - }); - - it('updates active class on click', () => { - vm.$el.querySelectorAll('a')[1].click(); - - return vm.$nextTick(() => 
{ - expect(vm.$el.querySelector('a').classList).not.toContain('active'); - expect(vm.$el.querySelectorAll('a')[1].classList).toContain('active'); - }); - }); - }); - - describe('content', () => { - it('renders content panes', () => { - expect(vm.$el.querySelectorAll('.tab-pane').length).toBe(2); - expect(vm.$el.querySelectorAll('.tab-pane')[0].textContent).toContain('First tab'); - expect(vm.$el.querySelectorAll('.tab-pane')[1].textContent).toContain('Second tab'); - }); - }); -}); diff --git a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js index 27c9b099306..380b7231acd 100644 --- a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js +++ b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js @@ -11,6 +11,15 @@ jest.mock('~/lib/utils/dom_utils', () => ({ throw new Error('this needs to be mocked'); }), })); +jest.mock('@gitlab/ui', () => ({ + GlTooltipDirective: { + bind(el, binding) { + el.classList.add('gl-tooltip'); + el.setAttribute('data-original-title', el.title); + el.dataset.placement = binding.value.placement; + }, + }, +})); describe('TooltipOnTruncate component', () => { let wrapper; @@ -52,7 +61,7 @@ describe('TooltipOnTruncate component', () => { wrapper = parent.find(TooltipOnTruncate); }; - const hasTooltip = () => wrapper.classes('js-show-tooltip'); + const hasTooltip = () => wrapper.classes('gl-tooltip'); afterEach(() => { wrapper.destroy(); diff --git a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap index d2fe3cd76cb..af4fa462cbf 100644 --- a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap +++ b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap @@ -19,6 +19,7 @@ exports[`Upload dropzone component correctly 
overrides description and drop mess <p class="gl-mb-0" + data-testid="upload-text" > <span> Test %{linkStart}description%{linkEnd} message. @@ -98,10 +99,15 @@ exports[`Upload dropzone component when dragging renders correct template when d <p class="gl-mb-0" + data-testid="upload-text" > - <gl-sprintf-stub - message="Drop or %{linkStart}upload%{linkEnd} files to attach" - /> + Drop or + <gl-link-stub> + + upload + + </gl-link-stub> + files to attach </p> </div> </button> @@ -178,10 +184,15 @@ exports[`Upload dropzone component when dragging renders correct template when d <p class="gl-mb-0" + data-testid="upload-text" > - <gl-sprintf-stub - message="Drop or %{linkStart}upload%{linkEnd} files to attach" - /> + Drop or + <gl-link-stub> + + upload + + </gl-link-stub> + files to attach </p> </div> </button> @@ -258,10 +269,15 @@ exports[`Upload dropzone component when dragging renders correct template when d <p class="gl-mb-0" + data-testid="upload-text" > - <gl-sprintf-stub - message="Drop or %{linkStart}upload%{linkEnd} files to attach" - /> + Drop or + <gl-link-stub> + + upload + + </gl-link-stub> + files to attach </p> </div> </button> @@ -337,10 +353,15 @@ exports[`Upload dropzone component when dragging renders correct template when d <p class="gl-mb-0" + data-testid="upload-text" > - <gl-sprintf-stub - message="Drop or %{linkStart}upload%{linkEnd} files to attach" - /> + Drop or + <gl-link-stub> + + upload + + </gl-link-stub> + files to attach </p> </div> </button> @@ -416,10 +437,15 @@ exports[`Upload dropzone component when dragging renders correct template when d <p class="gl-mb-0" + data-testid="upload-text" > - <gl-sprintf-stub - message="Drop or %{linkStart}upload%{linkEnd} files to attach" - /> + Drop or + <gl-link-stub> + + upload + + </gl-link-stub> + files to attach </p> </div> </button> @@ -495,10 +521,15 @@ exports[`Upload dropzone component when no slot provided renders default dropzon <p class="gl-mb-0" + data-testid="upload-text" > - 
<gl-sprintf-stub - message="Drop or %{linkStart}upload%{linkEnd} files to attach" - /> + Drop or + <gl-link-stub> + + upload + + </gl-link-stub> + files to attach </p> </div> </button> diff --git a/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js b/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js index ace486b1f32..b3cdbccb271 100644 --- a/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js +++ b/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js @@ -1,4 +1,4 @@ -import { GlIcon } from '@gitlab/ui'; +import { GlIcon, GlSprintf } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import UploadDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue'; @@ -14,6 +14,7 @@ describe('Upload dropzone component', () => { const findDropzoneCard = () => wrapper.find('.upload-dropzone-card'); const findDropzoneArea = () => wrapper.find('[data-testid="dropzone-area"]'); const findIcon = () => wrapper.find(GlIcon); + const findUploadText = () => wrapper.find('[data-testid="upload-text"]').text(); function createComponent({ slots = {}, data = {}, props = {} } = {}) { wrapper = shallowMount(UploadDropzone, { @@ -22,6 +23,9 @@ describe('Upload dropzone component', () => { displayAsCard: true, ...props, }, + stubs: { + GlSprintf, + }, data() { return data; }, @@ -30,6 +34,7 @@ describe('Upload dropzone component', () => { afterEach(() => { wrapper.destroy(); + wrapper = null; }); describe('when slot provided', () => { @@ -60,6 +65,18 @@ describe('Upload dropzone component', () => { }); }); + describe('upload text', () => { + it.each` + collection | description | props | expected + ${'multiple'} | ${'by default'} | ${null} | ${'files to attach'} + ${'singular'} | ${'when singleFileSelection'} | ${{ singleFileSelection: true }} | ${'file to attach'} + `('displays $collection version $description', ({ props, expected }) => { + createComponent({ props 
}); + + expect(findUploadText()).toContain(expected); + }); + }); + describe('when dragging', () => { it.each` description | eventPayload @@ -141,6 +158,21 @@ describe('Upload dropzone component', () => { wrapper.vm.ondrop(mockEvent); expect(wrapper.emitted()).not.toHaveProperty('error'); }); + + describe('singleFileSelection = true', () => { + it('emits a single file on drop', () => { + createComponent({ + data: mockData, + props: { singleFileSelection: true }, + }); + + const mockFile = { type: 'image/jpg' }; + const mockEvent = mockDragEvent({ files: [mockFile] }); + + wrapper.vm.ondrop(mockEvent); + expect(wrapper.emitted().change[0]).toEqual([mockFile]); + }); + }); }); }); diff --git a/spec/frontend/vue_shared/directives/tooltip_spec.js b/spec/frontend/vue_shared/directives/tooltip_spec.js deleted file mode 100644 index 99e8b5b552b..00000000000 --- a/spec/frontend/vue_shared/directives/tooltip_spec.js +++ /dev/null @@ -1,157 +0,0 @@ -import { mount } from '@vue/test-utils'; -import $ from 'jquery'; -import { escape } from 'lodash'; -import tooltip from '~/vue_shared/directives/tooltip'; - -const DEFAULT_TOOLTIP_TEMPLATE = '<div v-tooltip :title="tooltip"></div>'; -const HTML_TOOLTIP_TEMPLATE = '<div v-tooltip data-html="true" :title="tooltip"></div>'; - -describe('Tooltip directive', () => { - let wrapper; - - function createTooltipContainer({ - template = DEFAULT_TOOLTIP_TEMPLATE, - text = 'some text', - } = {}) { - wrapper = mount( - { - directives: { tooltip }, - data: () => ({ tooltip: text }), - template, - }, - { attachTo: document.body }, - ); - } - - async function showTooltip() { - $(wrapper.vm.$el).tooltip('show'); - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); - } - - function findTooltipInnerHtml() { - return document.querySelector('.tooltip-inner').innerHTML; - } - - function findTooltipHtml() { - return document.querySelector('.tooltip').innerHTML; - } - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - }); - - 
describe('with a single tooltip', () => { - it('should have tooltip plugin applied', () => { - createTooltipContainer(); - - expect($(wrapper.vm.$el).data('bs.tooltip')).toBeDefined(); - }); - - it('displays the title as tooltip', () => { - createTooltipContainer(); - - $(wrapper.vm.$el).tooltip('show'); - - jest.runOnlyPendingTimers(); - - const tooltipElement = document.querySelector('.tooltip-inner'); - - expect(tooltipElement.textContent).toContain('some text'); - }); - - it.each` - condition | template | sanitize - ${'does not contain any html'} | ${DEFAULT_TOOLTIP_TEMPLATE} | ${false} - ${'contains html'} | ${HTML_TOOLTIP_TEMPLATE} | ${true} - `('passes sanitize=$sanitize if the tooltip $condition', ({ template, sanitize }) => { - createTooltipContainer({ template }); - - expect($(wrapper.vm.$el).data('bs.tooltip').config.sanitize).toEqual(sanitize); - }); - - it('updates a visible tooltip', async () => { - createTooltipContainer(); - - $(wrapper.vm.$el).tooltip('show'); - jest.runOnlyPendingTimers(); - - const tooltipElement = document.querySelector('.tooltip-inner'); - - wrapper.vm.tooltip = 'other text'; - - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); - - expect(tooltipElement.textContent).toContain('other text'); - }); - - describe('tooltip sanitization', () => { - it('reads tooltip content as text if data-html is not passed', async () => { - createTooltipContainer({ text: 'sample text<script>alert("XSS!!")</script>' }); - - await showTooltip(); - - const result = findTooltipInnerHtml(); - expect(result).toEqual('sample text<script>alert("XSS!!")</script>'); - }); - - it('sanitizes tooltip if data-html is passed', async () => { - createTooltipContainer({ - template: HTML_TOOLTIP_TEMPLATE, - text: 'sample text<script>alert("XSS!!")</script>', - }); - - await showTooltip(); - - const result = findTooltipInnerHtml(); - expect(result).toEqual('sample text'); - expect(result).not.toContain('XSS!!'); - }); - - it('sanitizes tooltip if 
data-template is passed', async () => { - const tooltipTemplate = escape( - '<div class="tooltip" role="tooltip"><div onclick="alert(\'XSS!\')" class="arrow"></div><div class="tooltip-inner"></div></div>', - ); - - createTooltipContainer({ - template: `<div v-tooltip :title="tooltip" data-html="false" data-template="${tooltipTemplate}"></div>`, - }); - - await showTooltip(); - - const result = findTooltipHtml(); - expect(result).toEqual( - // objectionable element is removed - '<div class="arrow"></div><div class="tooltip-inner">some text</div>', - ); - expect(result).not.toContain('XSS!!'); - }); - }); - }); - - describe('with multiple tooltips', () => { - beforeEach(() => { - createTooltipContainer({ - template: ` - <div> - <div - v-tooltip - class="js-look-for-tooltip" - title="foo"> - </div> - <div - v-tooltip - title="bar"> - </div> - </div> - `, - }); - }); - - it('should have tooltip plugin applied to all instances', () => { - expect($(wrapper.vm.$el).find('.js-look-for-tooltip').data('bs.tooltip')).toBeDefined(); - }); - }); -}); diff --git a/spec/frontend_integration/ide/helpers/ide_helper.js b/spec/frontend_integration/ide/helpers/ide_helper.js index 9e6bafc1297..6c09b44d891 100644 --- a/spec/frontend_integration/ide/helpers/ide_helper.js +++ b/spec/frontend_integration/ide/helpers/ide_helper.js @@ -25,6 +25,9 @@ export const getStatusBar = () => document.querySelector('.ide-status-bar'); export const waitForMonacoEditor = () => new Promise((resolve) => window.monaco.editor.onDidCreateEditor(resolve)); +export const waitForEditorModelChange = (instance) => + new Promise((resolve) => instance.onDidChangeModel(resolve)); + export const findMonacoEditor = () => screen.findAllByLabelText(/Editor content;/).then(([x]) => x.closest('.monaco-editor')); diff --git a/spec/frontend_integration/ide/helpers/mock_data.js b/spec/frontend_integration/ide/helpers/mock_data.js index f70739e5ac0..8c9ec74541f 100644 --- a/spec/frontend_integration/ide/helpers/mock_data.js 
+++ b/spec/frontend_integration/ide/helpers/mock_data.js @@ -4,7 +4,6 @@ export const IDE_DATASET = { committedStateSvgPath: '/test/committed_state.svg', pipelinesEmptyStateSvgPath: '/test/pipelines_empty_state.svg', promotionSvgPath: '/test/promotion.svg', - ciHelpPagePath: '/test/ci_help_page', webIDEHelpPagePath: '/test/web_ide_help_page', clientsidePreviewEnabled: 'true', renderWhitespaceInCode: 'false', diff --git a/spec/frontend_integration/ide/helpers/start.js b/spec/frontend_integration/ide/helpers/start.js index 173a9610c84..cc6abd9e01f 100644 --- a/spec/frontend_integration/ide/helpers/start.js +++ b/spec/frontend_integration/ide/helpers/start.js @@ -1,6 +1,7 @@ +/* global monaco */ + import { TEST_HOST } from 'helpers/test_constants'; import { initIde } from '~/ide'; -import Editor from '~/ide/lib/editor'; import extendStore from '~/ide/stores/extend'; import { IDE_DATASET } from './mock_data'; @@ -18,13 +19,7 @@ export default (container, { isRepoEmpty = false, path = '', mrId = '' } = {}) = const vm = initIde(el, { extendStore }); // We need to dispose of editor Singleton things or tests will bump into eachother - vm.$on('destroy', () => { - if (Editor.editorInstance) { - Editor.editorInstance.modelManager.dispose(); - Editor.editorInstance.dispose(); - Editor.editorInstance = null; - } - }); + vm.$on('destroy', () => monaco.editor.getModels().forEach((model) => model.dispose())); return vm; }; diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js index 3ce88de11fe..5f1a5b0d048 100644 --- a/spec/frontend_integration/ide/ide_integration_spec.js +++ b/spec/frontend_integration/ide/ide_integration_spec.js @@ -96,16 +96,6 @@ describe('WebIDE', () => { let statusBar; let editor; - const waitForEditor = async () => { - editor = await ideHelper.waitForMonacoEditor(); - }; - - const changeEditorPosition = async (lineNumber, column) => { - editor.setPosition({ lineNumber, column }); - - await 
vm.$nextTick(); - }; - beforeEach(async () => { vm = startWebIDE(container); @@ -134,16 +124,17 @@ describe('WebIDE', () => { // Need to wait for monaco editor to load so it doesn't through errors on dispose await ideHelper.openFile('.gitignore'); - await ideHelper.waitForMonacoEditor(); + await ideHelper.waitForEditorModelChange(editor); await ideHelper.openFile('README.md'); - await ideHelper.waitForMonacoEditor(); + await ideHelper.waitForEditorModelChange(editor); expect(el).toHaveText(markdownPreview); }); describe('when editor position changes', () => { beforeEach(async () => { - await changeEditorPosition(4, 10); + editor.setPosition({ lineNumber: 4, column: 10 }); + await vm.$nextTick(); }); it('shows new line position', () => { @@ -153,7 +144,8 @@ describe('WebIDE', () => { it('updates after rename', async () => { await ideHelper.renameFile('README.md', 'READMEZ.txt'); - await waitForEditor(); + await ideHelper.waitForEditorModelChange(editor); + await vm.$nextTick(); expect(statusBar).toHaveText('1:1'); expect(statusBar).toHaveText('plaintext'); @@ -161,10 +153,10 @@ describe('WebIDE', () => { it('persists position after opening then rename', async () => { await ideHelper.openFile('files/js/application.js'); - await waitForEditor(); + await ideHelper.waitForEditorModelChange(editor); await ideHelper.renameFile('README.md', 'READING_RAINBOW.md'); await ideHelper.openFile('READING_RAINBOW.md'); - await waitForEditor(); + await ideHelper.waitForEditorModelChange(editor); expect(statusBar).toHaveText('4:10'); expect(statusBar).toHaveText('markdown'); @@ -173,7 +165,8 @@ describe('WebIDE', () => { it('persists position after closing', async () => { await ideHelper.closeFile('README.md'); await ideHelper.openFile('README.md'); - await waitForEditor(); + await ideHelper.waitForMonacoEditor(); + await vm.$nextTick(); expect(statusBar).toHaveText('4:10'); expect(statusBar).toHaveText('markdown'); diff --git a/spec/frontend_integration/ide/user_opens_mr_spec.js 
b/spec/frontend_integration/ide/user_opens_mr_spec.js index 9cf0ff5da56..3ffc5169351 100644 --- a/spec/frontend_integration/ide/user_opens_mr_spec.js +++ b/spec/frontend_integration/ide/user_opens_mr_spec.js @@ -24,11 +24,11 @@ describe('IDE: User opens Merge Request', () => { vm = startWebIDE(container, { mrId }); - await ideHelper.waitForTabToOpen(basename(changes[0].new_path)); - await ideHelper.waitForMonacoEditor(); + const editor = await ideHelper.waitForMonacoEditor(); + await ideHelper.waitForEditorModelChange(editor); }); - afterEach(async () => { + afterEach(() => { vm.$destroy(); vm = null; }); diff --git a/spec/frontend_integration/test_helpers/mock_server/graphql.js b/spec/frontend_integration/test_helpers/mock_server/graphql.js index 654c373e5a6..e2658852599 100644 --- a/spec/frontend_integration/test_helpers/mock_server/graphql.js +++ b/spec/frontend_integration/test_helpers/mock_server/graphql.js @@ -1,5 +1,11 @@ import { buildSchema, graphql } from 'graphql'; -import gitlabSchemaStr from '../../../../doc/api/graphql/reference/gitlab_schema.graphql'; + +/* eslint-disable import/no-unresolved */ +// This rule is disabled for the following line. +// The graphql schema is dynamically generated in CI +// during the `graphql-schema-dump` job. 
+import gitlabSchemaStr from '../../../../tmp/tests/graphql/gitlab_schema.graphql'; +/* eslint-enable import/no-unresolved */ const graphqlSchema = buildSchema(gitlabSchemaStr.loc.source.body); const graphqlResolvers = { diff --git a/spec/graphql/mutations/boards/update_spec.rb b/spec/graphql/mutations/boards/update_spec.rb new file mode 100644 index 00000000000..6f05b21b0b4 --- /dev/null +++ b/spec/graphql/mutations/boards/update_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Mutations::Boards::Update do + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + let_it_be(:board) { create(:board, project: project) } + + let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) } + let(:mutated_board) { subject[:board] } + + let(:mutation_params) do + { + id: board.to_global_id, + hide_backlog_list: true, + hide_closed_list: false + } + end + + subject { mutation.resolve(**mutation_params) } + + specify { expect(described_class).to require_graphql_authorizations(:admin_board) } + + describe '#resolve' do + context 'when the user cannot admin the board' do + it 'raises an error' do + expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) + end + end + + context 'with invalid params' do + it 'raises an error' do + mutation_params[:id] = project.to_global_id + + expect { subject }.to raise_error(::GraphQL::CoercionError) + end + end + + context 'when user can update board' do + before do + board.resource_parent.add_reporter(user) + end + + it 'updates board with correct values' do + expected_attributes = { + hide_backlog_list: true, + hide_closed_list: false + } + + subject + + expect(board.reload).to have_attributes(expected_attributes) + end + end + end +end diff --git a/spec/graphql/mutations/custom_emoji/create_spec.rb b/spec/graphql/mutations/custom_emoji/create_spec.rb new file mode 100644 index 00000000000..118c5d67188 --- 
/dev/null +++ b/spec/graphql/mutations/custom_emoji/create_spec.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Mutations::CustomEmoji::Create do + let_it_be(:group) { create(:group) } + let_it_be(:user) { create(:user) } + let(:args) { { group_path: group.full_path, name: 'tanuki', url: 'https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png' } } + + before do + group.add_developer(user) + end + + describe '#resolve' do + subject(:resolve) { described_class.new(object: nil, context: { current_user: user }, field: nil).resolve(**args) } + + it 'creates the custom emoji' do + expect { resolve }.to change(CustomEmoji, :count).by(1) + end + + it 'sets the creator to be the user who added the emoji' do + resolve + + expect(CustomEmoji.last.creator).to eq(user) + end + end +end diff --git a/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb index 578d679ade4..269a1fb1758 100644 --- a/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb +++ b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsResolver do +RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do include GraphqlHelpers let_it_be(:admin_user) { create(:user, :admin) } @@ -11,8 +11,8 @@ RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsReso describe '#resolve' do let_it_be(:user) { create(:user) } - let_it_be(:project_measurement_new) { create(:instance_statistics_measurement, :project_count, recorded_at: 2.days.ago) } - let_it_be(:project_measurement_old) { create(:instance_statistics_measurement, :project_count, recorded_at: 10.days.ago) } + let_it_be(:project_measurement_new) { 
create(:usage_trends_measurement, :project_count, recorded_at: 2.days.ago) } + let_it_be(:project_measurement_old) { create(:usage_trends_measurement, :project_count, recorded_at: 10.days.ago) } let(:arguments) { { identifier: 'projects' } } @@ -63,8 +63,8 @@ RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsReso end context 'when requesting pipeline counts by pipeline status' do - let_it_be(:pipelines_succeeded_measurement) { create(:instance_statistics_measurement, :pipelines_succeeded_count, recorded_at: 2.days.ago) } - let_it_be(:pipelines_skipped_measurement) { create(:instance_statistics_measurement, :pipelines_skipped_count, recorded_at: 2.days.ago) } + let_it_be(:pipelines_succeeded_measurement) { create(:usage_trends_measurement, :pipelines_succeeded_count, recorded_at: 2.days.ago) } + let_it_be(:pipelines_skipped_measurement) { create(:usage_trends_measurement, :pipelines_skipped_count, recorded_at: 2.days.ago) } subject { resolve_measurements({ identifier: identifier }, { current_user: current_user }) } diff --git a/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb b/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb new file mode 100644 index 00000000000..2cd61dd7bcf --- /dev/null +++ b/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::AlertManagement::HttpIntegrationsResolver do + include GraphqlHelpers + + let_it_be(:guest) { create(:user) } + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:prometheus_integration) { create(:prometheus_service, project: project) } + let_it_be(:active_http_integration) { create(:alert_management_http_integration, project: project) } + let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, 
project: project) } + let_it_be(:other_proj_integration) { create(:alert_management_http_integration) } + + subject { sync(resolve_http_integrations) } + + before do + project.add_developer(developer) + project.add_maintainer(maintainer) + end + + specify do + expect(described_class).to have_nullable_graphql_type(Types::AlertManagement::HttpIntegrationType.connection_type) + end + + context 'user does not have permission' do + let(:current_user) { guest } + + it { is_expected.to be_empty } + end + + context 'user has developer permission' do + let(:current_user) { developer } + + it { is_expected.to be_empty } + end + + context 'user has maintainer permission' do + let(:current_user) { maintainer } + + it { is_expected.to contain_exactly(active_http_integration) } + end + + private + + def resolve_http_integrations(args = {}, context = { current_user: current_user }) + resolve(described_class, obj: project, ctx: context) + end +end diff --git a/spec/graphql/types/admin/analytics/instance_statistics/measurement_identifier_enum_spec.rb b/spec/graphql/types/admin/analytics/usage_trends/measurement_identifier_enum_spec.rb index 8a7408224a2..91851c11dc8 100644 --- a/spec/graphql/types/admin/analytics/instance_statistics/measurement_identifier_enum_spec.rb +++ b/spec/graphql/types/admin/analytics/usage_trends/measurement_identifier_enum_spec.rb @@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['MeasurementIdentifier'] do it 'exposes all the existing identifier values' do ee_only_identifiers = %w[billable_users] - identifiers = Analytics::InstanceStatistics::Measurement.identifiers.keys.reject do |x| + identifiers = Analytics::UsageTrends::Measurement.identifiers.keys.reject do |x| ee_only_identifiers.include?(x) end.map(&:upcase) diff --git a/spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb b/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb index ffb1a0f30c9..c50092d7f0e 100644 --- 
a/spec/graphql/types/admin/analytics/instance_statistics/measurement_type_spec.rb +++ b/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe GitlabSchema.types['InstanceStatisticsMeasurement'] do +RSpec.describe GitlabSchema.types['UsageTrendsMeasurement'] do subject { described_class } it { is_expected.to have_graphql_field(:recorded_at) } @@ -10,13 +10,13 @@ RSpec.describe GitlabSchema.types['InstanceStatisticsMeasurement'] do it { is_expected.to have_graphql_field(:count) } describe 'authorization' do - let_it_be(:measurement) { create(:instance_statistics_measurement, :project_count) } + let_it_be(:measurement) { create(:usage_trends_measurement, :project_count) } let(:user) { create(:user) } let(:query) do <<~GRAPHQL - query instanceStatisticsMeasurements($identifier: MeasurementIdentifier!) { - instanceStatisticsMeasurements(identifier: $identifier) { + query usageTrendsMeasurements($identifier: MeasurementIdentifier!) 
{ + usageTrendsMeasurements(identifier: $identifier) { nodes { count identifier @@ -36,7 +36,7 @@ RSpec.describe GitlabSchema.types['InstanceStatisticsMeasurement'] do context 'when the user is not admin' do it 'returns no data' do - expect(subject.dig('data', 'instanceStatisticsMeasurements')).to be_nil + expect(subject.dig('data', 'usageTrendsMeasurements')).to be_nil end end @@ -48,7 +48,7 @@ RSpec.describe GitlabSchema.types['InstanceStatisticsMeasurement'] do end it 'returns data' do - expect(subject.dig('data', 'instanceStatisticsMeasurements', 'nodes')).not_to be_empty + expect(subject.dig('data', 'usageTrendsMeasurements', 'nodes')).not_to be_empty end end end diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb index 2a1e030480d..c4b34ca5316 100644 --- a/spec/graphql/types/ci/pipeline_type_spec.rb +++ b/spec/graphql/types/ci/pipeline_type_spec.rb @@ -16,7 +16,7 @@ RSpec.describe Types::Ci::PipelineType do ] if Gitlab.ee? - expected_fields << 'security_report_summary' + expected_fields += %w[security_report_summary security_report_findings] end expect(described_class).to have_graphql_fields(*expected_fields) diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb index cb129868f7e..8eb023ad2a3 100644 --- a/spec/graphql/types/global_id_type_spec.rb +++ b/spec/graphql/types/global_id_type_spec.rb @@ -5,7 +5,6 @@ require 'spec_helper' RSpec.describe Types::GlobalIDType do let_it_be(:project) { create(:project) } let(:gid) { project.to_global_id } - let(:foreign_gid) { GlobalID.new(::URI::GID.build(app: 'otherapp', model_name: 'Project', model_id: project.id, params: nil)) } it 'is has the correct name' do expect(described_class.to_graphql.name).to eq('GlobalID') @@ -41,16 +40,18 @@ RSpec.describe Types::GlobalIDType do it 'rejects invalid input' do expect { described_class.coerce_isolated_input('not valid') } - .to raise_error(GraphQL::CoercionError) + .to 
raise_error(GraphQL::CoercionError, /not a valid Global ID/) end it 'rejects nil' do expect(described_class.coerce_isolated_input(nil)).to be_nil end - it 'rejects gids from different apps' do - expect { described_class.coerce_isolated_input(foreign_gid) } - .to raise_error(GraphQL::CoercionError) + it 'rejects GIDs from different apps' do + invalid_gid = GlobalID.new(::URI::GID.build(app: 'otherapp', model_name: 'Project', model_id: project.id, params: nil)) + + expect { described_class.coerce_isolated_input(invalid_gid) } + .to raise_error(GraphQL::CoercionError, /is not a Gitlab Global ID/) end end @@ -79,14 +80,22 @@ RSpec.describe Types::GlobalIDType do let(:gid) { build_stubbed(:user).to_global_id } it 'raises errors when coercing results' do - expect { type.coerce_isolated_result(gid) }.to raise_error(GraphQL::CoercionError) + expect { type.coerce_isolated_result(gid) } + .to raise_error(GraphQL::CoercionError, /Expected a Project ID/) end it 'will not coerce invalid input, even if its a valid GID' do expect { type.coerce_isolated_input(gid.to_s) } - .to raise_error(GraphQL::CoercionError) + .to raise_error(GraphQL::CoercionError, /does not represent an instance of Project/) end end + + it 'handles GIDs for invalid resource names gracefully' do + invalid_gid = GlobalID.new(::URI::GID.build(app: GlobalID.app, model_name: 'invalid', model_id: 1, params: nil)) + + expect { type.coerce_isolated_input(invalid_gid) } + .to raise_error(GraphQL::CoercionError, /does not represent an instance of Project/) + end end describe 'a parameterized type with a namespace' do diff --git a/spec/graphql/types/label_type_spec.rb b/spec/graphql/types/label_type_spec.rb index 6a999a2e925..427b5d2dcef 100644 --- a/spec/graphql/types/label_type_spec.rb +++ b/spec/graphql/types/label_type_spec.rb @@ -3,7 +3,16 @@ require 'spec_helper' RSpec.describe GitlabSchema.types['Label'] do it 'has the correct fields' do - expected_fields = [:id, :description, :description_html, :title, :color, 
:text_color] + expected_fields = [ + :id, + :description, + :description_html, + :title, + :color, + :text_color, + :created_at, + :updated_at + ] expect(described_class).to have_graphql_fields(*expected_fields) end diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb index fea0a3bd37e..cb8e875dbf4 100644 --- a/spec/graphql/types/query_type_spec.rb +++ b/spec/graphql/types/query_type_spec.rb @@ -21,7 +21,7 @@ RSpec.describe GitlabSchema.types['Query'] do user users issue - instance_statistics_measurements + usage_trends_measurements runner_platforms ] @@ -65,11 +65,11 @@ RSpec.describe GitlabSchema.types['Query'] do end end - describe 'instance_statistics_measurements field' do - subject { described_class.fields['instanceStatisticsMeasurements'] } + describe 'usage_trends_measurements field' do + subject { described_class.fields['usageTrendsMeasurements'] } - it 'returns instance statistics measurements' do - is_expected.to have_graphql_type(Types::Admin::Analytics::InstanceStatistics::MeasurementType.connection_type) + it 'returns usage trends measurements' do + is_expected.to have_graphql_type(Types::Admin::Analytics::UsageTrends::MeasurementType.connection_type) end end diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb index b85ebec5545..83aad206547 100644 --- a/spec/helpers/boards_helper_spec.rb +++ b/spec/helpers/boards_helper_spec.rb @@ -3,52 +3,71 @@ require 'spec_helper' RSpec.describe BoardsHelper do + let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project) } + let_it_be(:base_group) { create(:group, path: 'base') } + let_it_be(:project_board) { create(:board, project: project) } + let_it_be(:group_board) { create(:board, group: base_group) } describe '#build_issue_link_base' do context 'project board' do it 'returns correct path for project board' do - @project = project - @board = create(:board, project: @project) + assign(:project, project) + assign(:board, 
project_board) - expect(build_issue_link_base).to eq("/#{@project.namespace.path}/#{@project.path}/-/issues") + expect(helper.build_issue_link_base).to eq("/#{project.namespace.path}/#{project.path}/-/issues") end end context 'group board' do - let(:base_group) { create(:group, path: 'base') } - it 'returns correct path for base group' do - @board = create(:board, group: base_group) + assign(:board, group_board) - expect(build_issue_link_base).to eq('/base/:project_path/issues') + expect(helper.build_issue_link_base).to eq('/base/:project_path/issues') end it 'returns correct path for subgroup' do subgroup = create(:group, parent: base_group, path: 'sub') - @board = create(:board, group: subgroup) + assign(:board, create(:board, group: subgroup)) - expect(build_issue_link_base).to eq('/base/sub/:project_path/issues') + expect(helper.build_issue_link_base).to eq('/base/sub/:project_path/issues') end end end - describe '#board_data' do - let_it_be(:user) { create(:user) } - let_it_be(:board) { create(:board, project: project) } + describe '#board_base_url' do + context 'when project board' do + it 'generates the correct url' do + assign(:board, group_board) + assign(:group, base_group) + + expect(helper.board_base_url).to eq "http://test.host/groups/#{base_group.full_path}/-/boards" + end + end + + context 'when project board' do + it 'generates the correct url' do + assign(:board, project_board) + assign(:project, project) + + expect(helper.board_base_url).to eq "/#{project.full_path}/-/boards" + end + end + end + describe '#board_data' do context 'project_board' do before do assign(:project, project) - assign(:board, board) + assign(:board, project_board) allow(helper).to receive(:current_user) { user } - allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, board).and_return(true) - allow(helper).to receive(:can?).with(user, :admin_issue, board).and_return(true) + allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, 
project_board).and_return(true) + allow(helper).to receive(:can?).with(user, :admin_issue, project_board).and_return(true) end it 'returns a board_lists_path as lists_endpoint' do - expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(board)) + expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(project_board)) end it 'returns board type as parent' do @@ -66,25 +85,22 @@ RSpec.describe BoardsHelper do end context 'group board' do - let_it_be(:group) { create(:group, path: 'base') } - let_it_be(:board) { create(:board, group: group) } - before do - assign(:group, group) - assign(:board, board) + assign(:group, base_group) + assign(:board, group_board) allow(helper).to receive(:current_user) { user } - allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, board).and_return(true) - allow(helper).to receive(:can?).with(user, :admin_issue, board).and_return(true) + allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, group_board).and_return(true) + allow(helper).to receive(:can?).with(user, :admin_issue, group_board).and_return(true) end it 'returns correct path for base group' do - expect(helper.build_issue_link_base).to eq('/base/:project_path/issues') + expect(helper.build_issue_link_base).to eq("/#{base_group.full_path}/:project_path/issues") end it 'returns required label endpoints' do - expect(helper.board_data[:labels_fetch_path]).to eq("/groups/base/-/labels.json?include_ancestor_groups=true&only_group_labels=true") - expect(helper.board_data[:labels_manage_path]).to eq("/groups/base/-/labels") + expect(helper.board_data[:labels_fetch_path]).to eq("/groups/#{base_group.full_path}/-/labels.json?include_ancestor_groups=true&only_group_labels=true") + expect(helper.board_data[:labels_manage_path]).to eq("/groups/#{base_group.full_path}/-/labels") end end end @@ -93,8 +109,7 @@ RSpec.describe BoardsHelper do let(:board_json) { helper.current_board_json } it 'can serialise with a basic set of attributes' 
do - board = create(:board, project: project) - assign(:board, board) + assign(:board, project_board) expect(board_json).to match_schema('current-board') end diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb index 2f5f4c4596b..4a841fac064 100644 --- a/spec/helpers/commits_helper_spec.rb +++ b/spec/helpers/commits_helper_spec.rb @@ -238,15 +238,5 @@ RSpec.describe CommitsHelper do expect(subject).to be_a(Gitlab::Git::DiffCollection) end end - - context "feature flag is disabled" do - let(:paginate) { true } - - it "returns a standard DiffCollection" do - stub_feature_flags(paginate_commit_view: false) - - expect(subject).to be_a(Gitlab::Git::DiffCollection) - end - end end end diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb index 576021b37b3..ef6bc864eef 100644 --- a/spec/helpers/invite_members_helper_spec.rb +++ b/spec/helpers/invite_members_helper_spec.rb @@ -11,6 +11,21 @@ RSpec.describe InviteMembersHelper do helper.extend(Gitlab::Experimentation::ControllerConcern) end + describe '#show_invite_members_track_event' do + it 'shows values when can directly invite members' do + allow(helper).to receive(:directly_invite_members?).and_return(true) + + expect(helper.show_invite_members_track_event).to eq 'show_invite_members' + end + + it 'shows values when can indirectly invite members' do + allow(helper).to receive(:directly_invite_members?).and_return(false) + allow(helper).to receive(:indirectly_invite_members?).and_return(true) + + expect(helper.show_invite_members_track_event).to eq 'show_invite_members_version_b' + end + end + context 'with project' do before do assign(:project, project) diff --git a/spec/helpers/issuables_description_templates_helper_spec.rb b/spec/helpers/issuables_description_templates_helper_spec.rb index 42643b755f8..e8961ccb535 100644 --- a/spec/helpers/issuables_description_templates_helper_spec.rb +++ 
b/spec/helpers/issuables_description_templates_helper_spec.rb @@ -13,22 +13,33 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do let_it_be(:group_member) { create(:group_member, :developer, group: parent_group, user: user) } let_it_be(:project_member) { create(:project_member, :developer, user: user, project: project) } - it 'returns empty hash when template type does not exist' do - expect(helper.issuable_templates(build(:project), 'non-existent-template-type')).to eq([]) + context 'when feature flag disabled' do + before do + stub_feature_flags(inherited_issuable_templates: false) + end + + it 'returns empty array when template type does not exist' do + expect(helper.issuable_templates(project, 'non-existent-template-type')).to eq([]) + end end - context 'with cached issuable templates' do + context 'when feature flag enabled' do before do - allow(Gitlab::Template::IssueTemplate).to receive(:template_names).and_return({}) - allow(Gitlab::Template::MergeRequestTemplate).to receive(:template_names).and_return({}) + stub_feature_flags(inherited_issuable_templates: true) + end - helper.issuable_templates(project, 'issues') - helper.issuable_templates(project, 'merge_request') + it 'returns empty hash when template type does not exist' do + expect(helper.issuable_templates(build(:project), 'non-existent-template-type')).to eq({}) end + end + context 'with cached issuable templates' do it 'does not call TemplateFinder' do - expect(Gitlab::Template::IssueTemplate).not_to receive(:template_names) - expect(Gitlab::Template::MergeRequestTemplate).not_to receive(:template_names) + expect(Gitlab::Template::IssueTemplate).to receive(:template_names).once.and_call_original + expect(Gitlab::Template::MergeRequestTemplate).to receive(:template_names).once.and_call_original + + helper.issuable_templates(project, 'issues') + helper.issuable_templates(project, 'merge_request') helper.issuable_templates(project, 'issues') 
helper.issuable_templates(project, 'merge_request') end @@ -63,29 +74,78 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do end describe '#issuable_templates_names' do - let(:project) { double(Project, id: 21) } - - let(:templates) do - [ - { name: "another_issue_template", id: "another_issue_template", project_id: project.id }, - { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id } - ] - end + let_it_be(:project) { build(:project) } - it 'returns project templates only' do + before do allow(helper).to receive(:ref_project).and_return(project) allow(helper).to receive(:issuable_templates).and_return(templates) + end + + context 'when feature flag disabled' do + let(:templates) do + [ + { name: "another_issue_template", id: "another_issue_template", project_id: project.id }, + { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id } + ] + end - expect(helper.issuable_templates_names(Issue.new)).to eq(%w[another_issue_template custom_issue_template]) + before do + stub_feature_flags(inherited_issuable_templates: false) + end + + it 'returns project templates only' do + expect(helper.issuable_templates_names(Issue.new)).to eq(%w[another_issue_template custom_issue_template]) + end + end + + context 'when feature flag enabled' do + before do + stub_feature_flags(inherited_issuable_templates: true) + end + + context 'with matching project templates' do + let(:templates) do + { + "" => [ + { name: "another_issue_template", id: "another_issue_template", project_id: project.id }, + { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id } + ], + "Instance" => [ + { name: "first_issue_issue_template", id: "first_issue_issue_template", project_id: non_existing_record_id }, + { name: "second_instance_issue_template", id: "second_instance_issue_template", project_id: non_existing_record_id } + ] + } + end + + it 'returns project templates only' do + 
expect(helper.issuable_templates_names(Issue.new)).to eq(%w[another_issue_template custom_issue_template]) + end + end + + context 'without matching project templates' do + let(:templates) do + { + "Project Templates" => [ + { name: "another_issue_template", id: "another_issue_template", project_id: non_existing_record_id }, + { name: "custom_issue_template", id: "custom_issue_template", project_id: non_existing_record_id } + ], + "Instance" => [ + { name: "first_issue_issue_template", id: "first_issue_issue_template", project_id: non_existing_record_id }, + { name: "second_instance_issue_template", id: "second_instance_issue_template", project_id: non_existing_record_id } + ] + } + end + + it 'returns empty array' do + expect(helper.issuable_templates_names(Issue.new)).to eq([]) + end + end end context 'when there are not templates in the project' do let(:templates) { {} } it 'returns empty array' do - allow(helper).to receive(:ref_project).and_return(project) - allow(helper).to receive(:issuable_templates).and_return(templates) - expect(helper.issuable_templates_names(Issue.new)).to eq([]) end end diff --git a/spec/helpers/services_helper_spec.rb b/spec/helpers/services_helper_spec.rb index 534f33d9b5a..3b08393ebdf 100644 --- a/spec/helpers/services_helper_spec.rb +++ b/spec/helpers/services_helper_spec.rb @@ -4,32 +4,49 @@ require 'spec_helper' RSpec.describe ServicesHelper do describe '#integration_form_data' do + let(:fields) do + [ + :id, + :show_active, + :activated, + :type, + :merge_request_events, + :commit_events, + :enable_comments, + :comment_detail, + :learn_more_path, + :trigger_events, + :fields, + :inherit_from_id, + :integration_level, + :editable, + :cancel_path, + :can_test, + :test_path, + :reset_path + ] + end + + let(:jira_fields) { %i[jira_issue_transition_id] } + subject { helper.integration_form_data(integration) } - context 'Jira service' do - let(:integration) { build(:jira_service) } + context 'Slack service' do + let(:integration) { 
build(:slack_service) } - it 'includes Jira specific fields' do - is_expected.to include( - :id, - :show_active, - :activated, - :type, - :merge_request_events, - :commit_events, - :enable_comments, - :comment_detail, - :trigger_events, - :fields, - :inherit_from_id, - :integration_level - ) - end + it { is_expected.to include(*fields) } + it { is_expected.not_to include(*jira_fields) } specify do expect(subject[:reset_path]).to eq(helper.scoped_reset_integration_path(integration)) end end + + context 'Jira service' do + let(:integration) { build(:jira_service) } + + it { is_expected.to include(*fields, *jira_fields) } + end end describe '#scoped_reset_integration_path' do diff --git a/spec/helpers/stat_anchors_helper_spec.rb b/spec/helpers/stat_anchors_helper_spec.rb index 0615baac3cb..f3830bf4172 100644 --- a/spec/helpers/stat_anchors_helper_spec.rb +++ b/spec/helpers/stat_anchors_helper_spec.rb @@ -18,7 +18,7 @@ RSpec.describe StatAnchorsHelper do context 'when anchor is not a link' do context 'when class_modifier is set' do - let(:anchor) { anchor_klass.new(false, nil, nil, 'default') } + let(:anchor) { anchor_klass.new(false, nil, nil, 'btn-default') } it 'returns the proper attributes' do expect(subject[:class]).to include('gl-button btn btn-default') @@ -49,5 +49,21 @@ RSpec.describe StatAnchorsHelper do expect(subject[:itemprop]).to eq true end end + + context 'when data is not set' do + let(:anchor) { anchor_klass.new(false, nil, nil, nil, nil, nil, nil) } + + it 'returns the data attributes' do + expect(subject[:data]).to be_nil + end + end + + context 'when itemprop is set' do + let(:anchor) { anchor_klass.new(false, nil, nil, nil, nil, nil, { 'toggle' => 'modal' }) } + + it 'returns the data attributes' do + expect(subject[:data]).to eq({ 'toggle' => 'modal' }) + end + end end end diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb index 492058c6a00..7a8cc713e4f 100644 --- a/spec/lib/backup/repositories_spec.rb +++ 
b/spec/lib/backup/repositories_spec.rb @@ -230,6 +230,16 @@ RSpec.describe Backup::Repositories do expect(pool_repository).not_to be_failed expect(pool_repository.object_pool.exists?).to be(true) end + + it 'skips pools with no source project, :sidekiq_might_not_need_inline' do + pool_repository = create(:pool_repository, state: :obsolete) + pool_repository.update_column(:source_project_id, nil) + + subject.restore + + pool_repository.reload + expect(pool_repository).to be_obsolete + end end it 'cleans existing repositories' do diff --git a/spec/lib/banzai/filter/video_link_filter_spec.rb b/spec/lib/banzai/filter/video_link_filter_spec.rb index 32fbc6b687f..ec954aa9163 100644 --- a/spec/lib/banzai/filter/video_link_filter_spec.rb +++ b/spec/lib/banzai/filter/video_link_filter_spec.rb @@ -33,6 +33,7 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do expect(video.name).to eq 'video' expect(video['src']).to eq src expect(video['width']).to eq "400" + expect(video['preload']).to eq 'metadata' expect(paragraph.name).to eq 'p' diff --git a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb b/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb deleted file mode 100644 index 57ffdfa9aee..00000000000 --- a/spec/lib/bulk_imports/common/loaders/entity_loader_spec.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Common::Loaders::EntityLoader do - describe '#load' do - it "creates entities for the given data" do - group = create(:group, path: "imported-group") - parent_entity = create(:bulk_import_entity, group: group, bulk_import: create(:bulk_import)) - context = BulkImports::Pipeline::Context.new(parent_entity) - - data = { - source_type: :group_entity, - source_full_path: "parent/subgroup", - destination_name: "subgroup", - destination_namespace: parent_entity.group.full_path, - parent_id: parent_entity.id - } - - expect { subject.load(context, data) }.to change(BulkImports::Entity, 
:count).by(1) - - subgroup_entity = BulkImports::Entity.last - - expect(subgroup_entity.source_full_path).to eq 'parent/subgroup' - expect(subgroup_entity.destination_namespace).to eq 'imported-group' - expect(subgroup_entity.destination_name).to eq 'subgroup' - expect(subgroup_entity.parent_id).to eq parent_entity.id - end - end -end diff --git a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb index 247da200d68..85f82be7d18 100644 --- a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb +++ b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb @@ -3,15 +3,18 @@ require 'spec_helper' RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do - describe '#variables' do - let(:entity) { double(source_full_path: 'test', next_page_for: 'next_page', bulk_import: nil) } - let(:context) { BulkImports::Pipeline::Context.new(entity) } - - it 'returns query variables based on entity information' do - expected = { full_path: entity.source_full_path, cursor: entity.next_page_for } - - expect(described_class.variables(context)).to eq(expected) - end + it 'has a valid query' do + entity = create(:bulk_import_entity) + context = BulkImports::Pipeline::Context.new(entity) + + query = GraphQL::Query.new( + GitlabSchema, + described_class.to_s, + variables: described_class.variables(context) + ) + result = GitlabSchema.static_validator.validate(query) + + expect(result[:errors]).to be_empty end describe '#data_path' do diff --git a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb deleted file mode 100644 index ac2f9c8cb1d..00000000000 --- a/spec/lib/bulk_imports/groups/loaders/labels_loader_spec.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Loaders::LabelsLoader do - describe '#load' do - let(:user) { create(:user) } - let(:group) 
{ create(:group) } - let(:entity) { create(:bulk_import_entity, group: group) } - let(:context) { BulkImports::Pipeline::Context.new(entity) } - - let(:data) do - { - 'title' => 'label', - 'description' => 'description', - 'color' => '#FFFFFF' - } - end - - it 'creates the label' do - expect { subject.load(context, data) }.to change(Label, :count).by(1) - - label = group.labels.first - - expect(label.title).to eq(data['title']) - expect(label.description).to eq(data['description']) - expect(label.color).to eq(data['color']) - end - end -end diff --git a/spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb deleted file mode 100644 index d552578e7be..00000000000 --- a/spec/lib/bulk_imports/groups/loaders/members_loader_spec.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Loaders::MembersLoader do - describe '#load' do - let_it_be(:user_importer) { create(:user) } - let_it_be(:user_member) { create(:user) } - let_it_be(:group) { create(:group) } - let_it_be(:bulk_import) { create(:bulk_import, user: user_importer) } - let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) } - - let_it_be(:data) do - { - 'user_id' => user_member.id, - 'created_by_id' => user_importer.id, - 'access_level' => 30, - 'created_at' => '2020-01-01T00:00:00Z', - 'updated_at' => '2020-01-01T00:00:00Z', - 'expires_at' => nil - } - end - - it 'does nothing when there is no data' do - expect { subject.load(context, nil) }.not_to change(GroupMember, :count) - end - - it 'creates the member' do - expect { subject.load(context, data) }.to change(GroupMember, :count).by(1) - - member = group.members.last - - expect(member.user).to eq(user_member) - expect(member.created_by).to eq(user_importer) - expect(member.access_level).to eq(30) - 
expect(member.created_at).to eq('2020-01-01T00:00:00Z') - expect(member.updated_at).to eq('2020-01-01T00:00:00Z') - expect(member.expires_at).to eq(nil) - end - end -end diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb index 63f28916d9a..3327a30f1d5 100644 --- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb @@ -6,6 +6,7 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do let(:user) { create(:user) } let(:group) { create(:group) } let(:cursor) { 'cursor' } + let(:timestamp) { Time.new(2020, 01, 01).utc } let(:entity) do create( :bulk_import_entity, @@ -20,21 +21,23 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do subject { described_class.new(context) } - def extractor_data(title:, has_next_page:, cursor: nil) - data = [ - { - 'title' => title, - 'description' => 'desc', - 'color' => '#428BCA' - } - ] + def label_data(title) + { + 'title' => title, + 'description' => 'desc', + 'color' => '#428BCA', + 'created_at' => timestamp.to_s, + 'updated_at' => timestamp.to_s + } + end + def extractor_data(title:, has_next_page:, cursor: nil) page_info = { 'end_cursor' => cursor, 'has_next_page' => has_next_page } - BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) + BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info) end describe '#run' do @@ -55,6 +58,8 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do expect(label.title).to eq('label2') expect(label.description).to eq('desc') expect(label.color).to eq('#428BCA') + expect(label.created_at).to eq(timestamp) + expect(label.updated_at).to eq(timestamp) end end @@ -90,6 +95,20 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do end end + describe '#load' do + it 'creates the label' do + data = label_data('label') + + expect { 
subject.load(context, data) }.to change(Label, :count).by(1) + + label = group.labels.first + + data.each do |key, value| + expect(label[key]).to eq(value) + end + end + end + describe 'pipeline parts' do it { expect(described_class).to include_module(BulkImports::Pipeline) } it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) } @@ -110,9 +129,5 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil } ) end - - it 'has loaders' do - expect(described_class.get_loader).to eq(klass: BulkImports::Groups::Loaders::LabelsLoader, options: nil) - end end end diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb index 9f498f8154f..74d3e09d263 100644 --- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb @@ -37,6 +37,34 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do end end + describe '#load' do + it 'does nothing when there is no data' do + expect { subject.load(context, nil) }.not_to change(GroupMember, :count) + end + + it 'creates the member' do + data = { + 'user_id' => member_user1.id, + 'created_by_id' => member_user2.id, + 'access_level' => 30, + 'created_at' => '2020-01-01T00:00:00Z', + 'updated_at' => '2020-01-01T00:00:00Z', + 'expires_at' => nil + } + + expect { subject.load(context, data) }.to change(GroupMember, :count).by(1) + + member = group.members.last + + expect(member.user).to eq(member_user1) + expect(member.created_by).to eq(member_user2) + expect(member.access_level).to eq(30) + expect(member.created_at).to eq('2020-01-01T00:00:00Z') + expect(member.updated_at).to eq('2020-01-01T00:00:00Z') + expect(member.expires_at).to eq(nil) + end + end + describe 'pipeline parts' do it { expect(described_class).to 
include_module(BulkImports::Pipeline) } it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) } @@ -58,10 +86,6 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do { klass: BulkImports::Groups::Transformers::MemberAttributesTransformer, options: nil } ) end - - it 'has loaders' do - expect(described_class.get_loader).to eq(klass: BulkImports::Groups::Loaders::MembersLoader, options: nil) - end end def member_data(email:, has_next_page:, cursor: nil) diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb index 0404c52b895..2a99646bb4a 100644 --- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb @@ -3,9 +3,14 @@ require 'spec_helper' RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group, path: 'group') } + let_it_be(:parent) { create(:group, name: 'imported-group', path: 'imported-group') } + let(:context) { BulkImports::Pipeline::Context.new(parent_entity) } + + subject { described_class.new(context) } + describe '#run' do - let_it_be(:user) { create(:user) } - let(:parent) { create(:group, name: 'imported-group', path: 'imported-group') } let!(:parent_entity) do create( :bulk_import_entity, @@ -14,8 +19,6 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do ) end - let(:context) { BulkImports::Pipeline::Context.new(parent_entity) } - let(:subgroup_data) do [ { @@ -25,8 +28,6 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do ] end - subject { described_class.new(context) } - before do allow_next_instance_of(BulkImports::Groups::Extractors::SubgroupsExtractor) do |extractor| allow(extractor).to receive(:extract).and_return(subgroup_data) @@ -47,6 +48,29 @@ RSpec.describe 
BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do end end + describe '#load' do + let(:parent_entity) { create(:bulk_import_entity, group: group, bulk_import: create(:bulk_import)) } + + it 'creates entities for the given data' do + data = { + source_type: :group_entity, + source_full_path: 'parent/subgroup', + destination_name: 'subgroup', + destination_namespace: parent_entity.group.full_path, + parent_id: parent_entity.id + } + + expect { subject.load(context, data) }.to change(BulkImports::Entity, :count).by(1) + + subgroup_entity = BulkImports::Entity.last + + expect(subgroup_entity.source_full_path).to eq 'parent/subgroup' + expect(subgroup_entity.destination_namespace).to eq 'group' + expect(subgroup_entity.destination_name).to eq 'subgroup' + expect(subgroup_entity.parent_id).to eq parent_entity.id + end + end + describe 'pipeline parts' do it { expect(described_class).to include_module(BulkImports::Pipeline) } it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) } @@ -61,9 +85,5 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do { klass: BulkImports::Groups::Transformers::SubgroupToEntityTransformer, options: nil } ) end - - it 'has loaders' do - expect(described_class.get_loader).to eq(klass: BulkImports::Common::Loaders::EntityLoader, options: nil) - end end end diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb index 5a7a51675d6..b3fe8a2ba25 100644 --- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb +++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb @@ -80,14 +80,14 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do expect(transformed_data['parent_id']).to eq(parent.id) end - context 'when destination namespace is user namespace' do + context 'when destination namespace is 
empty' do it 'does not set parent id' do entity = create( :bulk_import_entity, bulk_import: bulk_import, source_full_path: 'source/full/path', destination_name: group.name, - destination_namespace: user.namespace.full_path + destination_namespace: '' ) context = BulkImports::Pipeline::Context.new(entity) diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb index 3811a02a7fd..2bbb55f9419 100644 --- a/spec/lib/bulk_imports/pipeline_spec.rb +++ b/spec/lib/bulk_imports/pipeline_spec.rb @@ -3,25 +3,25 @@ require 'spec_helper' RSpec.describe BulkImports::Pipeline do - describe 'pipeline attributes' do - before do - stub_const('BulkImports::Extractor', Class.new) - stub_const('BulkImports::Transformer', Class.new) - stub_const('BulkImports::Loader', Class.new) - - klass = Class.new do - include BulkImports::Pipeline + before do + stub_const('BulkImports::Extractor', Class.new) + stub_const('BulkImports::Transformer', Class.new) + stub_const('BulkImports::Loader', Class.new) - abort_on_failure! + klass = Class.new do + include BulkImports::Pipeline - extractor BulkImports::Extractor, { foo: :bar } - transformer BulkImports::Transformer, { foo: :bar } - loader BulkImports::Loader, { foo: :bar } - end + abort_on_failure! 
- stub_const('BulkImports::MyPipeline', klass) + extractor BulkImports::Extractor, foo: :bar + transformer BulkImports::Transformer, foo: :bar + loader BulkImports::Loader, foo: :bar end + stub_const('BulkImports::MyPipeline', klass) + end + + describe 'pipeline attributes' do describe 'getters' do it 'retrieves class attributes' do expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: BulkImports::Extractor, options: { foo: :bar } }) @@ -29,6 +29,27 @@ RSpec.describe BulkImports::Pipeline do expect(BulkImports::MyPipeline.get_loader).to eq({ klass: BulkImports::Loader, options: { foo: :bar } }) expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true) end + + context 'when extractor and loader are defined within the pipeline' do + before do + klass = Class.new do + include BulkImports::Pipeline + + def extract; end + + def load; end + end + + stub_const('BulkImports::AnotherPipeline', klass) + end + + it 'returns itself when retrieving extractor & loader' do + pipeline = BulkImports::AnotherPipeline.new(nil) + + expect(pipeline.send(:extractor)).to eq(pipeline) + expect(pipeline.send(:loader)).to eq(pipeline) + end + end end describe 'setters' do @@ -54,4 +75,46 @@ RSpec.describe BulkImports::Pipeline do end end end + + describe '#instantiate' do + context 'when options are present' do + it 'instantiates new object with options' do + expect(BulkImports::Extractor).to receive(:new).with(foo: :bar) + expect(BulkImports::Transformer).to receive(:new).with(foo: :bar) + expect(BulkImports::Loader).to receive(:new).with(foo: :bar) + + pipeline = BulkImports::MyPipeline.new(nil) + + pipeline.send(:extractor) + pipeline.send(:transformers) + pipeline.send(:loader) + end + end + + context 'when options are missing' do + before do + klass = Class.new do + include BulkImports::Pipeline + + extractor BulkImports::Extractor + transformer BulkImports::Transformer + loader BulkImports::Loader + end + + stub_const('BulkImports::NoOptionsPipeline', klass) + end + + it 
'instantiates new object without options' do + expect(BulkImports::Extractor).to receive(:new).with(no_args) + expect(BulkImports::Transformer).to receive(:new).with(no_args) + expect(BulkImports::Loader).to receive(:new).with(no_args) + + pipeline = BulkImports::NoOptionsPipeline.new(nil) + + pipeline.send(:extractor) + pipeline.send(:transformers) + pipeline.send(:loader) + end + end + end end diff --git a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb new file mode 100644 index 00000000000..e2fdd4918d5 --- /dev/null +++ b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do + let_it_be(:project) { create(:project) } + + let_it_be(:issue_1) do + # Duration: 10 days + create(:issue, project: project, created_at: 20.days.ago).tap do |issue| + issue.metrics.update!(first_mentioned_in_commit_at: 10.days.ago) + end + end + + let_it_be(:issue_2) do + # Duration: 5 days + create(:issue, project: project, created_at: 20.days.ago).tap do |issue| + issue.metrics.update!(first_mentioned_in_commit_at: 15.days.ago) + end + end + + let(:stage) do + build( + :cycle_analytics_project_stage, + start_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::IssueCreated.identifier, + end_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::IssueFirstMentionedInCommit.identifier, + project: project + ) + end + + let(:query) { Issue.joins(:metrics).in_projects(project.id) } + + around do |example| + freeze_time { example.run } + end + + subject(:average) { described_class.new(stage: stage, query: query) } + + describe '#seconds' do + subject(:average_duration_in_seconds) { average.seconds } + + context 'when no results' do + let(:query) { Issue.none } + + it { is_expected.to eq(nil) } + end + + context 'returns the average duration in seconds' do + it { 
is_expected.to be_within(0.5).of(7.5.days.to_f) } + end + end + + describe '#days' do + subject(:average_duration_in_days) { average.days } + + context 'when no results' do + let(:query) { Issue.none } + + it { is_expected.to eq(nil) } + end + + context 'returns the average duration in days' do + it { is_expected.to be_within(0.01).of(7.5) } + end + end +end diff --git a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb b/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb index 115c8145f59..34c5bd6c6ae 100644 --- a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb +++ b/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do +RSpec.describe Gitlab::Analytics::UsageTrends::WorkersArgumentBuilder do context 'when no measurement identifiers are given' do it 'returns empty array' do expect(described_class.new(measurement_identifiers: []).execute).to be_empty @@ -16,8 +16,8 @@ RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do let_it_be(:project_3) { create(:project, namespace: user_1.namespace, creator: user_1) } let(:recorded_at) { 2.days.ago } - let(:projects_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:projects) } - let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) } + let(:projects_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:projects) } + let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:users) } let(:measurement_identifiers) { [projects_measurement_identifier, users_measurement_identifier] } subject { described_class.new(measurement_identifiers: measurement_identifiers, recorded_at: recorded_at).execute } @@ -46,19 +46,19 @@ RSpec.describe 
Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do context 'when custom min and max queries are present' do let(:min_id) { User.second.id } let(:max_id) { User.maximum(:id) } - let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) } + let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:users) } before do create_list(:user, 2) min_max_queries = { - ::Analytics::InstanceStatistics::Measurement.identifiers[:users] => { + ::Analytics::UsageTrends::Measurement.identifiers[:users] => { minimum_query: -> { min_id }, maximum_query: -> { max_id } } } - allow(::Analytics::InstanceStatistics::Measurement).to receive(:identifier_min_max_queries) { min_max_queries } + allow(::Analytics::UsageTrends::Measurement).to receive(:identifier_min_max_queries) { min_max_queries } end subject do diff --git a/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb b/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb new file mode 100644 index 00000000000..d0e51593fd4 --- /dev/null +++ b/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::SetDefaultIterationCadences, schema: 20201231133921 do + let(:namespaces) { table(:namespaces) } + let(:iterations) { table(:sprints) } + let(:iterations_cadences) { table(:iterations_cadences) } + + describe '#perform' do + context 'when no iteration cadences exists' do + let!(:group_1) { namespaces.create!(name: 'group 1', path: 'group-1') } + let!(:group_2) { namespaces.create!(name: 'group 2', path: 'group-2') } + let!(:group_3) { namespaces.create!(name: 'group 3', path: 'group-3') } + + let!(:iteration_1) { iterations.create!(group_id: group_1.id, iid: 1, title: 'Iteration 1', start_date: 10.days.ago, due_date: 8.days.ago) } + let!(:iteration_2) { 
iterations.create!(group_id: group_3.id, iid: 1, title: 'Iteration 2', start_date: 10.days.ago, due_date: 8.days.ago) } + let!(:iteration_3) { iterations.create!(group_id: group_3.id, iid: 1, title: 'Iteration 3', start_date: 5.days.ago, due_date: 2.days.ago) } + + before do + described_class.new.perform(group_1.id, group_2.id, group_3.id, namespaces.last.id + 1) + end + + it 'creates iterations_cadence records for the requested groups' do + expect(iterations_cadences.count).to eq(2) + end + + it 'assigns the iteration cadences to the iterations correctly' do + iterations_cadence = iterations_cadences.find_by(group_id: group_1.id) + iteration_records = iterations.where(iterations_cadence_id: iterations_cadence.id) + + expect(iterations_cadence.start_date).to eq(iteration_1.start_date) + expect(iterations_cadence.last_run_date).to eq(iteration_1.start_date) + expect(iterations_cadence.title).to eq('group 1 Iterations') + expect(iteration_records.size).to eq(1) + expect(iteration_records.first.id).to eq(iteration_1.id) + + iterations_cadence = iterations_cadences.find_by(group_id: group_3.id) + iteration_records = iterations.where(iterations_cadence_id: iterations_cadence.id) + + expect(iterations_cadence.start_date).to eq(iteration_3.start_date) + expect(iterations_cadence.last_run_date).to eq(iteration_3.start_date) + expect(iterations_cadence.title).to eq('group 3 Iterations') + expect(iteration_records.size).to eq(2) + expect(iteration_records.first.id).to eq(iteration_2.id) + expect(iteration_records.second.id).to eq(iteration_3.id) + end + end + + context 'when an iteration cadence exists for a group' do + let!(:group) { namespaces.create!(name: 'group', path: 'group') } + + let!(:iterations_cadence_1) { iterations_cadences.create!(group_id: group.id, start_date: 5.days.ago, title: 'Cadence 1') } + let!(:iterations_cadence_2) { iterations_cadences.create!(group_id: group.id, start_date: 2.days.ago, title: 'Cadence 2') } + + let!(:iteration_1) { 
iterations.create!(group_id: group.id, iid: 1, title: 'Iteration 1', start_date: 10.days.ago, due_date: 8.days.ago) } + let!(:iteration_2) { iterations.create!(group_id: group.id, iterations_cadence_id: iterations_cadence_1.id, iid: 2, title: 'Iteration 2', start_date: 5.days.ago, due_date: 3.days.ago) } + let!(:iteration_3) { iterations.create!(group_id: group.id, iterations_cadence_id: iterations_cadence_2.id, iid: 3, title: 'Iteration 3', start_date: 2.days.ago, due_date: 1.day.ago) } + + subject { described_class.new.perform(group.id) } + + it 'does not create a new iterations_cadence' do + expect { subject }.not_to change { iterations_cadences.count } + end + + it 'assigns iteration cadences to iterations if needed' do + subject + + expect(iteration_1.reload.iterations_cadence_id).to eq(iterations_cadence_1.id) + expect(iteration_2.reload.iterations_cadence_id).to eq(iterations_cadence_1.id) + expect(iteration_3.reload.iterations_cadence_id).to eq(iterations_cadence_2.id) + end + end + end +end diff --git a/spec/lib/gitlab/ci/charts_spec.rb b/spec/lib/gitlab/ci/charts_spec.rb index 46d7d4a58f0..3a82d058819 100644 --- a/spec/lib/gitlab/ci/charts_spec.rb +++ b/spec/lib/gitlab/ci/charts_spec.rb @@ -98,7 +98,12 @@ RSpec.describe Gitlab::Ci::Charts do subject { chart.total } before do - create(:ci_empty_pipeline, project: project, duration: 120) + # The created_at time used by the following execution + # can end up being after the creation of the 'today' time + # objects created above, and cause the queried counts to + # go to zero when the test executes close to midnight on the + # CI system, so we explicitly set it to a day earlier + create(:ci_empty_pipeline, project: project, duration: 120, created_at: today - 1.day) end it 'uses a utc time zone for range times' do diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb index 342ca6b8b75..480a4a05379 100644 --- a/spec/lib/gitlab/ci/jwt_spec.rb +++ b/spec/lib/gitlab/ci/jwt_spec.rb @@ -114,17 
+114,6 @@ RSpec.describe Gitlab::Ci::Jwt do expect(payload[:environment]).to eq('production') expect(payload[:environment_protected]).to eq('false') end - - context ':ci_jwt_include_environment feature flag is disabled' do - before do - stub_feature_flags(ci_jwt_include_environment: false) - end - - it 'does not include environment attributes' do - expect(payload).not_to have_key(:environment) - expect(payload).not_to have_key(:environment_protected) - end - end end end diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 92bf2519588..597e4ca9b03 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_default: :keep do - let_it_be(:project) { create_default(:project) } + let_it_be(:project) { create_default(:project).freeze } let_it_be_with_reload(:build) { create(:ci_build) } let(:trace) { described_class.new(build) } diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb index 01aceec12c5..f5cb1987c5c 100644 --- a/spec/lib/gitlab/current_settings_spec.rb +++ b/spec/lib/gitlab/current_settings_spec.rb @@ -24,6 +24,26 @@ RSpec.describe Gitlab::CurrentSettings do end end + describe '.signup_disabled?' do + subject { described_class.signup_disabled? 
} + + context 'when signup is enabled' do + before do + create(:application_setting, signup_enabled: true) + end + + it { is_expected.to be_falsey } + end + + context 'when signup is disabled' do + before do + create(:application_setting, signup_enabled: false) + end + + it { is_expected.to be_truthy } + end + end + describe '#current_application_settings', :use_clean_rails_memory_store_caching do it 'allows keys to be called directly' do db_settings = create(:application_setting, diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb new file mode 100644 index 00000000000..a3b03050b33 --- /dev/null +++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do + subject { described_class.new } + + let(:connection) { ActiveRecord::Base.connection } + + def mock_pgss(enabled: true) + if enabled + allow(subject).to receive(:function_exists?).with(:pg_stat_statements_reset).and_return(true) + allow(connection).to receive(:view_exists?).with(:pg_stat_statements).and_return(true) + else + allow(subject).to receive(:function_exists?).with(:pg_stat_statements_reset).and_return(false) + allow(connection).to receive(:view_exists?).with(:pg_stat_statements).and_return(false) + end + end + + describe '#before' do + context 'with pgss available' do + it 'resets pg_stat_statements' do + mock_pgss(enabled: true) + expect(connection).to receive(:execute).with('select pg_stat_statements_reset()').once + + subject.before + end + end + + context 'without pgss available' do + it 'executes nothing' do + mock_pgss(enabled: false) + expect(connection).not_to receive(:execute) + + subject.before + end + end + end + + describe '#record' do + let(:observation) { Gitlab::Database::Migrations::Observation.new } + let(:result) { 
double } + let(:pgss_query) do + <<~SQL + SELECT query, calls, total_time, max_time, mean_time, rows + FROM pg_stat_statements + ORDER BY total_time DESC + SQL + end + + context 'with pgss available' do + it 'fetches data from pg_stat_statements and stores on the observation' do + mock_pgss(enabled: true) + expect(connection).to receive(:execute).with(pgss_query).once.and_return(result) + + expect { subject.record(observation) }.to change { observation.query_statistics }.from(nil).to(result) + end + end + + context 'without pgss available' do + it 'executes nothing' do + mock_pgss(enabled: false) + expect(connection).not_to receive(:execute) + + expect { subject.record(observation) }.not_to change { observation.query_statistics } + end + end + end +end diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb index eb11c051adc..7436765e8ee 100644 --- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb @@ -36,7 +36,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do expect(new_issue.author).to eql(User.support_bot) expect(new_issue.confidential?).to be true expect(new_issue.all_references.all).to be_empty - expect(new_issue.title).to eq("Service Desk (from jake@adventuretime.ooo): The message subject! @all") + expect(new_issue.title).to eq("The message subject! 
@all") expect(new_issue.description).to eq(expected_description.strip) end diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb index 7eeae3f3f33..a4b046a9ed5 100644 --- a/spec/lib/gitlab/experimentation_spec.rb +++ b/spec/lib/gitlab/experimentation_spec.rb @@ -13,7 +13,6 @@ RSpec.describe Gitlab::Experimentation::EXPERIMENTS do :invite_members_version_b, :invite_members_empty_group_version_a, :contact_sales_btn_in_app, - :customize_homepage, :group_only_trials ] diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb index 064e0c6828b..2923e589ff7 100644 --- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb +++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb @@ -5,21 +5,25 @@ require 'spec_helper' RSpec.describe Gitlab::Graphql::Docs::Renderer do describe '#contents' do # Returns a Schema that uses the given `type` - def mock_schema(type) + def mock_schema(type, field_description) query_type = Class.new(Types::BaseObject) do - graphql_name 'QueryType' + graphql_name 'Query' - field :foo, type, null: true + field :foo, type, null: true do + description field_description + argument :id, GraphQL::ID_TYPE, required: false, description: 'ID of the object.' + end end GraphQL::Schema.define(query: query_type) end let_it_be(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/', 'default.md.haml') } + let(:field_description) { 'List of objects.' } subject(:contents) do described_class.new( - mock_schema(type).graphql_definition, + mock_schema(type, field_description).graphql_definition, output_dir: nil, template: template ).contents @@ -45,6 +49,32 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do is_expected.to include(expectation) end + + context 'query generation' do + let(:expectation) do + <<~DOC + ### Foo + + List of objects. + + | Name | Description | Type | + | ----- | ---- | ----------- | + | `id` | ID of the object. 
| ID | + DOC + end + + it 'generates the query with arguments' do + expect(subject).to include(expectation) + end + + context 'when description does not end with `.`' do + let(:field_description) { 'List of objects' } + + it 'adds the `.` to the end' do + expect(subject).to include(expectation) + end + end + end end context 'A type with fields defined in reverse alphabetical order' do diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb index fe43a23e242..718a23f80a1 100644 --- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb @@ -46,7 +46,7 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do context 'when the repository already exists' do it 'deletes the existing repository before importing' do allow(project.repository).to receive(:exists?).and_return(true) - allow(project.repository).to receive(:path).and_return('repository_path') + allow(project.repository).to receive(:disk_path).and_return('repository_path') expect_next_instance_of(Repositories::DestroyService) do |instance| expect(instance).to receive(:execute).and_call_original diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb index edcd5b31941..209eb3f90a4 100644 --- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb @@ -17,6 +17,15 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do ) end + # Emulate Marginalia pre-pending comments + def sql(query, comments: true) + if comments + "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}" + else + query + end + end + describe '#sql' do shared_examples 'track query in metrics' do before do @@ -101,7 +110,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do expect { subscriber.sql(event) }.to change { 
Thread.current[:uses_db_connection] }.from(nil).to(true) end - context 'with read query' do + shared_examples 'read queries' do let(:expected_counters) do { db_count: 1, @@ -114,14 +123,14 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do it_behaves_like 'track query in RequestStore' context 'with only select' do - let(:payload) { { sql: 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' } } + let(:payload) { { sql: sql('WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones', comments: comments) } } it_behaves_like 'track query in metrics' it_behaves_like 'track query in RequestStore' end end - context 'write query' do + shared_examples 'write queries' do let(:expected_counters) do { db_count: 1, @@ -131,7 +140,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do end context 'with select for update sql event' do - let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10 FOR UPDATE' } } + let(:payload) { { sql: sql('SELECT * FROM users WHERE id = 10 FOR UPDATE', comments: comments) } } it_behaves_like 'track query in metrics' it_behaves_like 'track query in RequestStore' @@ -139,7 +148,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do context 'with common table expression' do context 'with insert' do - let(:payload) { { sql: 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' } } + let(:payload) { { sql: sql('WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows', comments: comments) } } it_behaves_like 'track query in metrics' it_behaves_like 'track query in RequestStore' @@ -147,27 +156,41 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do end context 'with delete sql event' do - let(:payload) { { sql: 'DELETE FROM users where id = 10' } } + 
let(:payload) { { sql: sql('DELETE FROM users where id = 10', comments: comments) } } it_behaves_like 'track query in metrics' it_behaves_like 'track query in RequestStore' end context 'with insert sql event' do - let(:payload) { { sql: 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' } } + let(:payload) { { sql: sql('INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects', comments: comments) } } it_behaves_like 'track query in metrics' it_behaves_like 'track query in RequestStore' end context 'with update sql event' do - let(:payload) { { sql: 'UPDATE users SET admin = true WHERE id = 10' } } + let(:payload) { { sql: sql('UPDATE users SET admin = true WHERE id = 10', comments: comments) } } it_behaves_like 'track query in metrics' it_behaves_like 'track query in RequestStore' end end + context 'without Marginalia comments' do + let(:comments) { false } + + it_behaves_like 'write queries' + it_behaves_like 'read queries' + end + + context 'with Marginalia comments' do + let(:comments) { true } + + it_behaves_like 'write queries' + it_behaves_like 'read queries' + end + context 'with cached query' do let(:expected_counters) do { @@ -180,7 +203,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do context 'with cached payload ' do let(:payload) do { - sql: 'SELECT * FROM users WHERE id = 10', + sql: sql('SELECT * FROM users WHERE id = 10'), cached: true } end @@ -192,7 +215,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do context 'with cached payload name' do let(:payload) do { - sql: 'SELECT * FROM users WHERE id = 10', + sql: sql('SELECT * FROM users WHERE id = 10'), name: 'CACHE' } end @@ -208,7 +231,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do :event, name: 'sql.active_record', payload: { - sql: "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND 
cons.conrelid = '\"projects\"'::regclass", + sql: sql("SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass"), name: 'SCHEMA', connection_id: 135, statement_name: nil, diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index 776ca81a338..1aca3dae41b 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -367,6 +367,35 @@ RSpec.describe Gitlab::Regex do it { is_expected.not_to match('%2e%2e%2f1.2.3') } end + describe '.npm_package_name_regex' do + subject { described_class.npm_package_name_regex } + + it { is_expected.to match('@scope/package') } + it { is_expected.to match('unscoped-package') } + it { is_expected.not_to match('@first-scope@second-scope/package') } + it { is_expected.not_to match('scope-without-at-symbol/package') } + it { is_expected.not_to match('@not-a-scoped-package') } + it { is_expected.not_to match('@scope/sub/package') } + it { is_expected.not_to match('@scope/../../package') } + it { is_expected.not_to match('@scope%2e%2e%2fpackage') } + it { is_expected.not_to match('@%2e%2e%2f/package') } + + context 'capturing group' do + [ + ['@scope/package', 'scope'], + ['unscoped-package', nil], + ['@not-a-scoped-package', nil], + ['@scope/sub/package', nil], + ['@inv@lid-scope/package', nil] + ].each do |package_name, extracted_scope_name| + it "extracts the scope name for #{package_name}" do + match = package_name.match(described_class.npm_package_name_regex) + expect(match&.captures&.first).to eq(extracted_scope_name) + end + end + end + end + describe '.nuget_version_regex' do subject { described_class.nuget_version_regex } diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb index 7a0a4f0cc46..561edbd38f8 100644 --- a/spec/lib/gitlab/tracking/standard_context_spec.rb +++ 
b/spec/lib/gitlab/tracking/standard_context_spec.rb @@ -22,7 +22,7 @@ RSpec.describe Gitlab::Tracking::StandardContext do context 'staging' do before do - allow(Gitlab).to receive(:staging?).and_return(true) + stub_config_setting(url: 'https://staging.gitlab.com') end include_examples 'contains environment', 'staging' @@ -30,11 +30,27 @@ RSpec.describe Gitlab::Tracking::StandardContext do context 'production' do before do - allow(Gitlab).to receive(:com_and_canary?).and_return(true) + stub_config_setting(url: 'https://gitlab.com') end include_examples 'contains environment', 'production' end + + context 'org' do + before do + stub_config_setting(url: 'https://dev.gitlab.org') + end + + include_examples 'contains environment', 'org' + end + + context 'other self-managed instance' do + before do + stub_rails_env('production') + end + + include_examples 'contains environment', 'self-managed' + end end it 'contains source' do diff --git a/spec/lib/gitlab/usage/docs/renderer_spec.rb b/spec/lib/gitlab/usage/docs/renderer_spec.rb index 0677aa2d9d7..07f25cfcfa7 100644 --- a/spec/lib/gitlab/usage/docs/renderer_spec.rb +++ b/spec/lib/gitlab/usage/docs/renderer_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::Usage::Docs::Renderer do describe 'contents' do let(:dictionary_path) { Gitlab::Usage::Docs::Renderer::DICTIONARY_PATH } - let(:items) { Gitlab::Usage::MetricDefinition.definitions } + let(:items) { Gitlab::Usage::MetricDefinition.definitions.first(10).to_h } it 'generates dictionary for given items' do generated_dictionary = described_class.new(items).contents diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb index 5469ded18f9..f8db1d5e8be 100644 --- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb +++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb @@ -223,7 +223,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, 
:clean_gitlab_redi end it 'allows for YAML aliases in aggregated metrics configs' do - expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true) + expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true).at_least(:once) described_class.new(recorded_at) end diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb new file mode 100644 index 00000000000..664e7938a7e --- /dev/null +++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# If this spec fails, we need to add the new code review event to the correct aggregated metric +RSpec.describe 'Code review events' do + it 'the aggregated metrics contain all the code review metrics' do + path = Rails.root.join('lib/gitlab/usage_data_counters/aggregated_metrics/code_review.yml') + aggregated_events = YAML.safe_load(File.read(path), aliases: true)&.map(&:with_indifferent_access) + + code_review_aggregated_events = aggregated_events + .map { |event| event['events'] } + .flatten + .uniq + + code_review_events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category("code_review") + + exceptions = %w[i_code_review_mr_diffs i_code_review_mr_single_file_diffs] + code_review_aggregated_events += exceptions + + expect(code_review_events - code_review_aggregated_events).to be_empty + end +end diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb index 7b5efb11034..1be2a83f98f 100644 --- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red end it 'includes the right events' do - expect(described_class::KNOWN_EVENTS.size).to eq 45 + 
expect(described_class::KNOWN_EVENTS.size).to eq 48 end described_class::KNOWN_EVENTS.each do |event| diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb index fa0cd214c7e..2ee27fbe20c 100644 --- a/spec/lib/marginalia_spec.rb +++ b/spec/lib/marginalia_spec.rb @@ -37,26 +37,9 @@ RSpec.describe 'Marginalia spec' do } end - context 'when the feature is enabled' do - before do - stub_feature(true) - end - - it 'generates a query that includes the component and value' do - component_map.each do |component, value| - expect(recorded.log.last).to include("#{component}:#{value}") - end - end - end - - context 'when the feature is disabled' do - before do - stub_feature(false) - end - - it 'excludes annotations in generated queries' do - expect(recorded.log.last).not_to include("/*") - expect(recorded.log.last).not_to include("*/") + it 'generates a query that includes the component and value' do + component_map.each do |component, value| + expect(recorded.log.last).to include("#{component}:#{value}") end end end @@ -90,59 +73,37 @@ RSpec.describe 'Marginalia spec' do } end - context 'when the feature is enabled' do - before do - stub_feature(true) + it 'generates a query that includes the component and value' do + component_map.each do |component, value| + expect(recorded.log.last).to include("#{component}:#{value}") end + end - it 'generates a query that includes the component and value' do - component_map.each do |component, value| - expect(recorded.log.last).to include("#{component}:#{value}") - end - end - - describe 'for ActionMailer delivery jobs' do - let(:delivery_job) { MarginaliaTestMailer.first_user.deliver_later } - - let(:recorded) do - ActiveRecord::QueryRecorder.new do - delivery_job.perform_now - end - end - - let(:component_map) do - { - "application" => "sidekiq", - "jid" => delivery_job.job_id, - "job_class" => delivery_job.arguments.first - } - end + describe 'for ActionMailer delivery jobs' do + let(:delivery_job) { 
MarginaliaTestMailer.first_user.deliver_later } - it 'generates a query that includes the component and value' do - component_map.each do |component, value| - expect(recorded.log.last).to include("#{component}:#{value}") - end + let(:recorded) do + ActiveRecord::QueryRecorder.new do + delivery_job.perform_now end end - end - context 'when the feature is disabled' do - before do - stub_feature(false) + let(:component_map) do + { + "application" => "sidekiq", + "jid" => delivery_job.job_id, + "job_class" => delivery_job.arguments.first + } end - it 'excludes annotations in generated queries' do - expect(recorded.log.last).not_to include("/*") - expect(recorded.log.last).not_to include("*/") + it 'generates a query that includes the component and value' do + component_map.each do |component, value| + expect(recorded.log.last).to include("#{component}:#{value}") + end end end end - def stub_feature(value) - stub_feature_flags(marginalia: value) - Gitlab::Marginalia.set_enabled_from_feature_flag - end - def make_request(correlation_id) request_env = Rack::MockRequest.env_for('/') diff --git a/spec/migrations/schedule_set_default_iteration_cadences_spec.rb b/spec/migrations/schedule_set_default_iteration_cadences_spec.rb new file mode 100644 index 00000000000..9d7f1ac0dec --- /dev/null +++ b/spec/migrations/schedule_set_default_iteration_cadences_spec.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe ScheduleSetDefaultIterationCadences do + let(:namespaces) { table(:namespaces) } + let(:iterations) { table(:sprints) } + + let(:group_1) { namespaces.create!(name: 'test_1', path: 'test_1') } + let!(:group_2) { namespaces.create!(name: 'test_2', path: 'test_2') } + let(:group_3) { namespaces.create!(name: 'test_3', path: 'test_3') } + let(:group_4) { namespaces.create!(name: 'test_4', path: 'test_4') } + let(:group_5) { namespaces.create!(name: 'test_5', path: 'test_5') } + let(:group_6) { namespaces.create!(name: 'test_6', path: 'test_6') } + let(:group_7) { namespaces.create!(name: 'test_7', path: 'test_7') } + let(:group_8) { namespaces.create!(name: 'test_8', path: 'test_8') } + + let!(:iteration_1) { iterations.create!(iid: 1, title: 'iteration 1', group_id: group_1.id) } + let!(:iteration_2) { iterations.create!(iid: 1, title: 'iteration 2', group_id: group_3.id) } + let!(:iteration_3) { iterations.create!(iid: 1, title: 'iteration 2', group_id: group_4.id) } + let!(:iteration_4) { iterations.create!(iid: 1, title: 'iteration 2', group_id: group_5.id) } + let!(:iteration_5) { iterations.create!(iid: 1, title: 'iteration 2', group_id: group_6.id) } + let!(:iteration_6) { iterations.create!(iid: 1, title: 'iteration 2', group_id: group_7.id) } + let!(:iteration_7) { iterations.create!(iid: 1, title: 'iteration 2', group_id: group_8.id) } + + around do |example| + freeze_time { Sidekiq::Testing.fake! { example.run } } + end + + it 'schedules the background jobs', :aggregate_failures do + stub_const("#{described_class.name}::BATCH_SIZE", 3) + + migrate! 
+ + expect(BackgroundMigrationWorker.jobs.size).to be(3) + expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(2.minutes, group_1.id, group_3.id, group_4.id) + expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(4.minutes, group_5.id, group_6.id, group_7.id) + expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(6.minutes, group_8.id) + end +end diff --git a/spec/models/analytics/instance_statistics/measurement_spec.rb b/spec/models/analytics/usage_trends/measurement_spec.rb index dbb16c5ffbe..d9a6b70c87a 100644 --- a/spec/models/analytics/instance_statistics/measurement_spec.rb +++ b/spec/models/analytics/usage_trends/measurement_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' -RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do +RSpec.describe Analytics::UsageTrends::Measurement, type: :model do describe 'validation' do - let!(:measurement) { create(:instance_statistics_measurement) } + let!(:measurement) { create(:usage_trends_measurement) } it { is_expected.to validate_presence_of(:recorded_at) } it { is_expected.to validate_presence_of(:identifier) } @@ -33,9 +33,9 @@ RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do end describe 'scopes' do - let_it_be(:measurement_1) { create(:instance_statistics_measurement, :project_count, recorded_at: 10.days.ago) } - let_it_be(:measurement_2) { create(:instance_statistics_measurement, :project_count, recorded_at: 2.days.ago) } - let_it_be(:measurement_3) { create(:instance_statistics_measurement, :group_count, recorded_at: 5.days.ago) } + let_it_be(:measurement_1) { create(:usage_trends_measurement, :project_count, recorded_at: 10.days.ago) } + let_it_be(:measurement_2) { create(:usage_trends_measurement, :project_count, recorded_at: 2.days.ago) } + let_it_be(:measurement_3) { create(:usage_trends_measurement, :group_count, recorded_at: 5.days.ago) } describe '.order_by_latest' do subject { 
described_class.order_by_latest } @@ -101,15 +101,15 @@ RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do describe '.find_latest_or_fallback' do subject(:count) { described_class.find_latest_or_fallback(:pipelines_skipped).count } - context 'with instance statistics' do - let!(:measurement) { create(:instance_statistics_measurement, :pipelines_skipped_count) } + context 'with usage statistics' do + let!(:measurement) { create(:usage_trends_measurement, :pipelines_skipped_count) } it 'returns the latest stored measurement' do expect(count).to eq measurement.count end end - context 'without instance statistics' do + context 'without usage statistics' do it 'returns the realtime query of the measurement' do expect(count).to eq 0 end diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb index e5ab96ca514..a3a22c386c9 100644 --- a/spec/models/bulk_imports/entity_spec.rb +++ b/spec/models/bulk_imports/entity_spec.rb @@ -14,7 +14,6 @@ RSpec.describe BulkImports::Entity, type: :model do it { is_expected.to validate_presence_of(:source_type) } it { is_expected.to validate_presence_of(:source_full_path) } it { is_expected.to validate_presence_of(:destination_name) } - it { is_expected.to validate_presence_of(:destination_namespace) } it { is_expected.to define_enum_for(:source_type).with_values(%i[group_entity project_entity]) } @@ -38,7 +37,11 @@ RSpec.describe BulkImports::Entity, type: :model do context 'when associated with a group and no project' do it 'is valid as a group_entity' do entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil) + expect(entity).to be_valid + end + it 'is valid when destination_namespace is empty' do + entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil, destination_namespace: '') expect(entity).to be_valid end @@ -57,6 +60,12 @@ RSpec.describe BulkImports::Entity, type: :model do expect(entity).to be_valid end + 
it 'is invalid when destination_namespace is nil' do + entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil, destination_namespace: nil) + expect(entity).not_to be_valid + expect(entity.errors).to include(:destination_namespace) + end + it 'is invalid as a project_entity' do entity = build(:bulk_import_entity, :group_entity, group: nil, project: build(:project)) diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 94943fb3644..351b9f0554c 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -8,8 +8,8 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do include Ci::SourcePipelineHelpers let_it_be(:user) { create(:user) } - let_it_be(:namespace) { create_default(:namespace) } - let_it_be(:project) { create_default(:project, :repository) } + let_it_be(:namespace) { create_default(:namespace).freeze } + let_it_be(:project) { create_default(:project, :repository).freeze } let(:pipeline) do create(:ci_empty_pipeline, status: :created, project: project) @@ -3785,17 +3785,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do end describe '#default_branch?' do - let(:default_branch) { 'master'} - subject { pipeline.default_branch? 
} - before do - allow(project).to receive(:default_branch).and_return(default_branch) - end - context 'when pipeline ref is the default branch of the project' do let(:pipeline) do - build(:ci_empty_pipeline, status: :created, project: project, ref: default_branch) + build(:ci_empty_pipeline, status: :created, project: project, ref: project.default_branch) end it "returns true" do diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb index 5cb84ee131a..8107b5b1388 100644 --- a/spec/models/clusters/agent_token_spec.rb +++ b/spec/models/clusters/agent_token_spec.rb @@ -3,8 +3,9 @@ require 'spec_helper' RSpec.describe Clusters::AgentToken do - it { is_expected.to belong_to(:agent).class_name('Clusters::Agent') } + it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } it { is_expected.to belong_to(:created_by_user).class_name('User').optional } + it { is_expected.to validate_length_of(:description).is_at_most(1024) } describe '#token' do it 'is generated on save' do diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb index 2059e170446..7c2caafcdb7 100644 --- a/spec/models/concerns/project_features_compatibility_spec.rb +++ b/spec/models/concerns/project_features_compatibility_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe ProjectFeaturesCompatibility do let(:project) { create(:project) } - let(:features_enabled) { %w(issues wiki builds merge_requests snippets) } + let(:features_enabled) { %w(issues wiki builds merge_requests snippets security_and_compliance) } let(:features) { features_enabled + %w(repository pages operations) } # We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table diff --git a/spec/models/custom_emoji_spec.rb b/spec/models/custom_emoji_spec.rb index 41ce480b02f..e34934d393a 100644 --- a/spec/models/custom_emoji_spec.rb 
+++ b/spec/models/custom_emoji_spec.rb @@ -4,8 +4,10 @@ require 'spec_helper' RSpec.describe CustomEmoji do describe 'Associations' do - it { is_expected.to belong_to(:namespace) } + it { is_expected.to belong_to(:namespace).inverse_of(:custom_emoji) } + it { is_expected.to belong_to(:creator).inverse_of(:created_custom_emoji) } it { is_expected.to have_db_column(:file) } + it { is_expected.to validate_presence_of(:creator) } it { is_expected.to validate_length_of(:name).is_at_most(36) } it { is_expected.to validate_presence_of(:name) } it { is_expected.to have_db_column(:external) } @@ -36,7 +38,7 @@ RSpec.describe CustomEmoji do new_emoji = build(:custom_emoji, name: old_emoji.name, namespace: old_emoji.namespace, group: group) expect(new_emoji).not_to be_valid - expect(new_emoji.errors.messages).to eq(name: ["has already been taken"]) + expect(new_emoji.errors.messages).to include(name: ["has already been taken"]) end it 'disallows non http and https file value' do diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index e79b54b4674..6afd422e554 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -740,6 +740,33 @@ RSpec.describe Group do end end + describe '#direct_members' do + let_it_be(:group) { create(:group, :nested) } + let_it_be(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) } + let_it_be(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) } + + it 'does not return members of the parent' do + expect(group.direct_members).not_to include(maintainer) + end + + it 'returns the direct member of the group' do + expect(group.direct_members).to include(developer) + end + + context 'group sharing' do + let!(:shared_group) { create(:group) } + + before do + create(:group_group_link, shared_group: shared_group, shared_with_group: group) + end + + it 'does not return members of the shared_with group' do + expect(shared_group.direct_members).not_to( + include(developer)) + end + end + 
end + describe '#members_with_parents' do let!(:group) { create(:group, :nested) } let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) } @@ -932,6 +959,65 @@ RSpec.describe Group do end end + describe '#refresh_members_authorized_projects' do + let_it_be(:group) { create(:group, :nested) } + let_it_be(:parent_group_user) { create(:user) } + let_it_be(:group_user) { create(:user) } + + before do + group.parent.add_maintainer(parent_group_user) + group.add_developer(group_user) + end + + context 'users for which authorizations refresh is executed' do + it 'processes authorizations refresh for all members of the group' do + expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id, parent_group_user.id)).and_call_original + + group.refresh_members_authorized_projects + end + + context 'when explicitly specified to run only for direct members' do + it 'processes authorizations refresh only for direct members of the group' do + expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id)).and_call_original + + group.refresh_members_authorized_projects(direct_members_only: true) + end + end + end + end + + describe '#users_ids_of_direct_members' do + let_it_be(:group) { create(:group, :nested) } + let_it_be(:parent_group_user) { create(:user) } + let_it_be(:group_user) { create(:user) } + + before do + group.parent.add_maintainer(parent_group_user) + group.add_developer(group_user) + end + + it 'does not return user ids of the members of the parent' do + expect(group.users_ids_of_direct_members).not_to include(parent_group_user.id) + end + + it 'returns the user ids of the direct member of the group' do + expect(group.users_ids_of_direct_members).to include(group_user.id) + end + + context 'group sharing' do + let!(:shared_group) { create(:group) } + + before do + create(:group_group_link, shared_group: shared_group, shared_with_group: group) + end + + it 'does not return the 
user ids of members of the shared_with group' do + expect(shared_group.users_ids_of_direct_members).not_to( + include(group_user.id)) + end + end + end + describe '#user_ids_for_project_authorizations' do it 'returns the user IDs for which to refresh authorizations' do maintainer = create(:user) diff --git a/spec/models/iteration_spec.rb b/spec/models/iteration_spec.rb index e7ec5de0ef1..7241a07a215 100644 --- a/spec/models/iteration_spec.rb +++ b/spec/models/iteration_spec.rb @@ -5,6 +5,13 @@ require 'spec_helper' RSpec.describe Iteration do let_it_be(:project) { create(:project) } let_it_be(:group) { create(:group) } + let(:set_cadence) { nil } + + describe 'associations' do + it { is_expected.to belong_to(:project) } + it { is_expected.to belong_to(:group) } + it { is_expected.to belong_to(:iterations_cadence).inverse_of(:iterations) } + end describe "#iid" do it "is properly scoped on project and group" do @@ -32,6 +39,59 @@ RSpec.describe Iteration do end end + describe 'setting iteration cadence' do + let_it_be(:iterations_cadence) { create(:iterations_cadence, group: group, start_date: 10.days.ago) } + let(:iteration) { create(:iteration, group: group, iterations_cadence: set_cadence, start_date: 2.days.from_now) } + + context 'when iterations_cadence is set correctly' do + let(:set_cadence) { iterations_cadence} + + it 'does not change the iterations_cadence' do + expect(iteration.iterations_cadence).to eq(iterations_cadence) + end + end + + context 'when iterations_cadence exists for the group' do + let(:set_cadence) { nil } + + it 'sets the iterations_cadence to the existing record' do + expect(iteration.iterations_cadence).to eq(iterations_cadence) + end + end + + context 'when iterations_cadence does not exists for the group' do + let_it_be(:group) { create(:group, name: 'Test group')} + let(:iteration) { build(:iteration, group: group, iterations_cadence: set_cadence) } + + it 'creates a default iterations_cadence and uses it for the iteration' do + 
expect { iteration.save! }.to change { Iterations::Cadence.count }.by(1) + end + + it 'sets the newly created iterations_cadence to the reecord' do + iteration.save! + + expect(iteration.iterations_cadence).to eq(Iterations::Cadence.last) + end + + it 'creates the iterations_cadence with the correct attributes' do + iteration.save! + + cadence = Iterations::Cadence.last + + expect(cadence.reload.start_date).to eq(iteration.start_date) + expect(cadence.title).to eq('Test group Iterations') + end + end + + context 'when iteration is a project iteration' do + it 'does not set the iterations_cadence' do + iteration = create(:iteration, iterations_cadence: nil, project: project, skip_project_validation: true) + + expect(iteration.reload.iterations_cadence).to be_nil + end + end + end + describe '.filter_by_state' do let_it_be(:closed_iteration) { create(:iteration, :closed, :skip_future_date_validation, group: group, start_date: 8.days.ago, due_date: 2.days.ago) } let_it_be(:started_iteration) { create(:iteration, :started, :skip_future_date_validation, group: group, start_date: 1.day.ago, due_date: 6.days.from_now) } @@ -307,6 +367,43 @@ RSpec.describe Iteration do end end + describe '#validate_group' do + let_it_be(:iterations_cadence) { create(:iterations_cadence, group: group) } + + context 'when the iteration and iteration cadence groups are same' do + it 'is valid' do + iteration = build(:iteration, group: group, iterations_cadence: iterations_cadence) + + expect(iteration).to be_valid + end + end + + context 'when the iteration and iteration cadence groups are different' do + it 'is invalid' do + other_group = create(:group) + iteration = build(:iteration, group: other_group, iterations_cadence: iterations_cadence) + + expect(iteration).not_to be_valid + end + end + + context 'when the iteration belongs to a project and the iteration cadence is set' do + it 'is invalid' do + iteration = build(:iteration, project: project, iterations_cadence: iterations_cadence, 
skip_project_validation: true) + + expect(iteration).to be_invalid + end + end + + context 'when the iteration belongs to a project and the iteration cadence is not set' do + it 'is valid' do + iteration = build(:iteration, project: project, skip_project_validation: true) + + expect(iteration).to be_valid + end + end + end + describe '.within_timeframe' do let_it_be(:now) { Time.current } let_it_be(:project) { create(:project, :empty_repo) } diff --git a/spec/models/iterations/cadence_spec.rb b/spec/models/iterations/cadence_spec.rb new file mode 100644 index 00000000000..cdeeef97580 --- /dev/null +++ b/spec/models/iterations/cadence_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Iterations::Cadence do + describe 'associations' do + subject { build(:iterations_cadence) } + + it { is_expected.to belong_to(:group) } + it { is_expected.to have_many(:iterations).inverse_of(:iterations_cadence) } + end + + describe 'validations' do + subject { build(:iterations_cadence) } + + it { is_expected.to validate_presence_of(:title) } + it { is_expected.to validate_presence_of(:start_date) } + it { is_expected.to validate_presence_of(:group_id) } + it { is_expected.to validate_presence_of(:active) } + it { is_expected.to validate_presence_of(:automatic) } + end +end diff --git a/spec/models/list_spec.rb b/spec/models/list_spec.rb index f0b1bc33e84..922d6b01b82 100644 --- a/spec/models/list_spec.rb +++ b/spec/models/list_spec.rb @@ -17,6 +17,19 @@ RSpec.describe List do it { is_expected.to validate_presence_of(:list_type) } end + describe '.without_types' do + it 'exclude lists of given types' do + board = create(:list, list_type: :label).board + # closed list is created by default + backlog_list = create(:list, list_type: :backlog, board: board) + + exclude_type = [described_class.list_types[:label], described_class.list_types[:closed]] + + lists = described_class.without_types(exclude_type) + expect(lists.where(board: 
board)).to match_array([backlog_list]) + end + end + describe '#update_preferences_for' do let(:user) { create(:user) } let(:list) { create(:list) } diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index ebe2cd2ac03..06b6ba2088f 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -9,8 +9,8 @@ RSpec.describe MergeRequest, factory_default: :keep do using RSpec::Parameterized::TableSyntax - let_it_be(:namespace) { create_default(:namespace) } - let_it_be(:project, refind: true) { create_default(:project, :repository) } + let_it_be(:namespace) { create_default(:namespace).freeze } + let_it_be(:project, refind: true) { create_default(:project, :repository).freeze } subject { create(:merge_request) } @@ -3082,32 +3082,83 @@ RSpec.describe MergeRequest, factory_default: :keep do end describe "#head_pipeline_active? " do - it do - is_expected - .to delegate_method(:active?) - .to(:head_pipeline) - .with_prefix - .with_arguments(allow_nil: true) + context 'when project lacks a head_pipeline relation' do + before do + subject.head_pipeline = nil + end + + it 'returns false' do + expect(subject.head_pipeline_active?).to be false + end + end + + context 'when project has a head_pipeline relation' do + let(:pipeline) { create(:ci_empty_pipeline) } + + before do + allow(subject).to receive(:head_pipeline) { pipeline } + end + + it 'accesses the value from the head_pipeline' do + expect(subject.head_pipeline) + .to receive(:active?) + + subject.head_pipeline_active? + end end end describe "#actual_head_pipeline_success? " do - it do - is_expected - .to delegate_method(:success?) 
- .to(:actual_head_pipeline) - .with_prefix - .with_arguments(allow_nil: true) + context 'when project lacks an actual_head_pipeline relation' do + before do + allow(subject).to receive(:actual_head_pipeline) { nil } + end + + it 'returns false' do + expect(subject.actual_head_pipeline_success?).to be false + end + end + + context 'when project has a actual_head_pipeline relation' do + let(:pipeline) { create(:ci_empty_pipeline) } + + before do + allow(subject).to receive(:actual_head_pipeline) { pipeline } + end + + it 'accesses the value from the actual_head_pipeline' do + expect(subject.actual_head_pipeline) + .to receive(:success?) + + subject.actual_head_pipeline_success? + end end end describe "#actual_head_pipeline_active? " do - it do - is_expected - .to delegate_method(:active?) - .to(:actual_head_pipeline) - .with_prefix - .with_arguments(allow_nil: true) + context 'when project lacks an actual_head_pipeline relation' do + before do + allow(subject).to receive(:actual_head_pipeline) { nil } + end + + it 'returns false' do + expect(subject.actual_head_pipeline_active?).to be false + end + end + + context 'when project has a actual_head_pipeline relation' do + let(:pipeline) { create(:ci_empty_pipeline) } + + before do + allow(subject).to receive(:actual_head_pipeline) { pipeline } + end + + it 'accesses the value from the actual_head_pipeline' do + expect(subject.actual_head_pipeline) + .to receive(:active?) + + subject.actual_head_pipeline_active? 
+ end end end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 647e279bf83..ed0b9063e32 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -285,6 +285,17 @@ RSpec.describe Namespace do end end + describe '.top_most' do + let_it_be(:namespace) { create(:namespace) } + let_it_be(:sub_namespace) { create(:namespace, parent: namespace) } + + subject { described_class.top_most.ids } + + it 'only contains root namespace' do + is_expected.to eq([namespace.id]) + end + end + describe '#ancestors_upto' do let(:parent) { create(:group) } let(:child) { create(:group, parent: parent) } @@ -1449,4 +1460,24 @@ RSpec.describe Namespace do end end end + + describe '#recent?' do + subject { namespace.recent? } + + context 'when created more than 90 days ago' do + before do + namespace.update_attribute(:created_at, 91.days.ago) + end + + it { is_expected.to be(false) } + end + + context 'when created less than 90 days ago' do + before do + namespace.update_attribute(:created_at, 89.days.ago) + end + + it { is_expected.to be(true) } + end + end end diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb index 8429f577dc6..4debda0621c 100644 --- a/spec/models/notification_recipient_spec.rb +++ b/spec/models/notification_recipient_spec.rb @@ -337,6 +337,39 @@ RSpec.describe NotificationRecipient do expect(recipient.suitable_notification_level?).to eq true end end + + context 'with merge_when_pipeline_succeeds' do + let(:notification_setting) { user.notification_settings_for(project) } + let(:recipient) do + described_class.new( + user, + :watch, + custom_action: :merge_when_pipeline_succeeds, + target: target, + project: project + ) + end + + context 'custom event enabled' do + before do + notification_setting.update!(merge_when_pipeline_succeeds: true) + end + + it 'returns true' do + expect(recipient.suitable_notification_level?).to eq true + end + end + + context 'custom event 
disabled' do + before do + notification_setting.update!(merge_when_pipeline_succeeds: false) + end + + it 'returns false' do + expect(recipient.suitable_notification_level?).to eq false + end + end + end end end diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb index bc50e2af373..4ef5ab7af48 100644 --- a/spec/models/notification_setting_spec.rb +++ b/spec/models/notification_setting_spec.rb @@ -180,7 +180,8 @@ RSpec.describe NotificationSetting do :failed_pipeline, :success_pipeline, :fixed_pipeline, - :moved_project + :moved_project, + :merge_when_pipeline_succeeds ) end diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb index 6c55d37b95f..fc0bd34a331 100644 --- a/spec/models/packages/package_spec.rb +++ b/spec/models/packages/package_spec.rb @@ -162,6 +162,18 @@ RSpec.describe Packages::Package, type: :model do it { is_expected.not_to allow_value('../../../my_package').for(:name) } it { is_expected.not_to allow_value('%2e%2e%2fmy_package').for(:name) } end + + context 'npm package' do + subject { build_stubbed(:npm_package) } + + it { is_expected.to allow_value("@group-1/package").for(:name) } + it { is_expected.to allow_value("@any-scope/package").for(:name) } + it { is_expected.to allow_value("unscoped-package").for(:name) } + it { is_expected.not_to allow_value("@inv@lid-scope/package").for(:name) } + it { is_expected.not_to allow_value("@scope/../../package").for(:name) } + it { is_expected.not_to allow_value("@scope%2e%2e%fpackage").for(:name) } + it { is_expected.not_to allow_value("@scope/sub/package").for(:name) } + end end describe '#version' do @@ -342,16 +354,6 @@ RSpec.describe Packages::Package, type: :model do end describe '#package_already_taken' do - context 'npm package' do - let!(:package) { create(:npm_package) } - - it 'will not allow a package of the same name' do - new_package = build(:npm_package, project: create(:project), name: package.name) - - 
expect(new_package).not_to be_valid - end - end - context 'maven package' do let!(:package) { create(:maven_package) } @@ -511,7 +513,7 @@ RSpec.describe Packages::Package, type: :model do describe '.without_nuget_temporary_name' do let!(:package1) { create(:nuget_package) } - let!(:package2) { create(:nuget_package, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) } + let!(:package2) { create(:nuget_package, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) } subject { described_class.without_nuget_temporary_name } @@ -530,7 +532,7 @@ RSpec.describe Packages::Package, type: :model do it { is_expected.to match_array([package1, package2, package3]) } context 'with temporary packages' do - let!(:package1) { create(:nuget_package, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) } + let!(:package1) { create(:nuget_package, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) } it { is_expected.to match_array([package2, package3]) } end diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb index 78bd0e91208..ef0bc930152 100644 --- a/spec/models/project_services/jira_service_spec.rb +++ b/spec/models/project_services/jira_service_spec.rb @@ -82,11 +82,8 @@ RSpec.describe JiraService do subject(:fields) { service.fields } - it 'includes transition help link' do - transition_id_field = fields.find { |field| field[:name] == 'jira_issue_transition_id' } - - expect(transition_id_field[:title]).to eq('Jira workflow transition IDs') - expect(transition_id_field[:help]).to include('/help/user/project/integrations/jira') + it 'returns custom fields' do + expect(fields.pluck(:name)).to eq(%w[url api_url username password]) end end @@ -460,10 +457,10 @@ RSpec.describe JiraService do end context 'with options' do - let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}?expand=renderedFields" } + let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}?expand=renderedFields,transitions" } it 
'calls the Jira API with the options to get the issue' do - jira_service.find_issue(issue_key, rendered_fields: true) + jira_service.find_issue(issue_key, rendered_fields: true, transitions: true) expect(WebMock).to have_requested(:get, issue_url) end @@ -474,52 +471,56 @@ RSpec.describe JiraService do let(:custom_base_url) { 'http://custom_url' } shared_examples 'close_issue' do + let(:issue_key) { 'JIRA-123' } + let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}" } + let(:transitions_url) { "#{issue_url}/transitions" } + let(:comment_url) { "#{issue_url}/comment" } + let(:remote_link_url) { "#{issue_url}/remotelink" } + let(:transitions) { nil } + + let(:issue_fields) do + { + id: issue_key, + self: issue_url, + transitions: transitions + } + end + + subject(:close_issue) do + jira_service.close_issue(resource, ExternalIssue.new(issue_key, project)) + end + before do - @jira_service = described_class.new - allow(@jira_service).to receive_messages( - project_id: project.id, - project: project, - url: 'http://jira.example.com', - username: 'gitlab_jira_username', - password: 'gitlab_jira_password', - jira_issue_transition_id: '999' - ) + allow(jira_service).to receive_messages(jira_issue_transition_id: '999') # These stubs are needed to test JiraService#close_issue. # We close the issue then do another request to API to check if it got closed. # Here is stubbed the API return with a closed and an opened issues. 
- open_issue = JIRA::Resource::Issue.new(@jira_service.client, attrs: { 'id' => 'JIRA-123' }) + open_issue = JIRA::Resource::Issue.new(jira_service.client, attrs: issue_fields.deep_stringify_keys) closed_issue = open_issue.dup allow(open_issue).to receive(:resolution).and_return(false) allow(closed_issue).to receive(:resolution).and_return(true) allow(JIRA::Resource::Issue).to receive(:find).and_return(open_issue, closed_issue) - allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return('JIRA-123') + allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return(issue_key) allow(JIRA::Resource::Remotelink).to receive(:all).and_return([]) - @jira_service.save! - - project_issues_url = 'http://jira.example.com/rest/api/2/issue/JIRA-123' - @transitions_url = 'http://jira.example.com/rest/api/2/issue/JIRA-123/transitions' - @comment_url = 'http://jira.example.com/rest/api/2/issue/JIRA-123/comment' - @remote_link_url = 'http://jira.example.com/rest/api/2/issue/JIRA-123/remotelink' - - WebMock.stub_request(:get, project_issues_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)) - WebMock.stub_request(:post, @transitions_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)) - WebMock.stub_request(:post, @comment_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)) - WebMock.stub_request(:post, @remote_link_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)) + WebMock.stub_request(:get, issue_url).with(basic_auth: %w(jira-username jira-password)) + WebMock.stub_request(:post, transitions_url).with(basic_auth: %w(jira-username jira-password)) + WebMock.stub_request(:post, comment_url).with(basic_auth: %w(jira-username jira-password)) + WebMock.stub_request(:post, remote_link_url).with(basic_auth: %w(jira-username jira-password)) end let(:external_issue) { ExternalIssue.new('JIRA-123', project) } def close_issue - @jira_service.close_issue(resource, external_issue, current_user) + 
jira_service.close_issue(resource, external_issue, current_user) end it 'calls Jira API' do close_issue - expect(WebMock).to have_requested(:post, @comment_url).with( + expect(WebMock).to have_requested(:post, comment_url).with( body: /Issue solved with/ ).once end @@ -546,9 +547,9 @@ RSpec.describe JiraService do favicon_path = "http://localhost/assets/#{find_asset('favicon.png').digest_path}" # Creates comment - expect(WebMock).to have_requested(:post, @comment_url) + expect(WebMock).to have_requested(:post, comment_url) # Creates Remote Link in Jira issue fields - expect(WebMock).to have_requested(:post, @remote_link_url).with( + expect(WebMock).to have_requested(:post, remote_link_url).with( body: hash_including( GlobalID: 'GitLab', relationship: 'mentioned on', @@ -564,11 +565,11 @@ RSpec.describe JiraService do context 'when "comment_on_event_enabled" is set to false' do it 'creates Remote Link reference but does not create comment' do - allow(@jira_service).to receive_messages(comment_on_event_enabled: false) + allow(jira_service).to receive_messages(comment_on_event_enabled: false) close_issue - expect(WebMock).not_to have_requested(:post, @comment_url) - expect(WebMock).to have_requested(:post, @remote_link_url) + expect(WebMock).not_to have_requested(:post, comment_url) + expect(WebMock).to have_requested(:post, remote_link_url) end end @@ -589,7 +590,7 @@ RSpec.describe JiraService do close_issue - expect(WebMock).not_to have_requested(:post, @comment_url) + expect(WebMock).not_to have_requested(:post, comment_url) end end @@ -598,8 +599,8 @@ RSpec.describe JiraService do close_issue - expect(WebMock).not_to have_requested(:post, @comment_url) - expect(WebMock).not_to have_requested(:post, @remote_link_url) + expect(WebMock).not_to have_requested(:post, comment_url) + expect(WebMock).not_to have_requested(:post, remote_link_url) end it 'does not send comment or remote links to issues with unknown resolution' do @@ -607,8 +608,8 @@ RSpec.describe 
JiraService do close_issue - expect(WebMock).not_to have_requested(:post, @comment_url) - expect(WebMock).not_to have_requested(:post, @remote_link_url) + expect(WebMock).not_to have_requested(:post, comment_url) + expect(WebMock).not_to have_requested(:post, remote_link_url) end it 'references the GitLab commit' do @@ -616,7 +617,7 @@ RSpec.describe JiraService do close_issue - expect(WebMock).to have_requested(:post, @comment_url).with( + expect(WebMock).to have_requested(:post, comment_url).with( body: %r{#{custom_base_url}/#{project.full_path}/-/commit/#{commit_id}} ).once end @@ -631,18 +632,18 @@ RSpec.describe JiraService do close_issue - expect(WebMock).to have_requested(:post, @comment_url).with( + expect(WebMock).to have_requested(:post, comment_url).with( body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/-/commit/#{commit_id}} ).once end it 'logs exception when transition id is not valid' do - allow(@jira_service).to receive(:log_error) - WebMock.stub_request(:post, @transitions_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)).and_raise("Bad Request") + allow(jira_service).to receive(:log_error) + WebMock.stub_request(:post, transitions_url).with(basic_auth: %w(jira-username jira-password)).and_raise("Bad Request") close_issue - expect(@jira_service).to have_received(:log_error).with( + expect(jira_service).to have_received(:log_error).with( "Issue transition failed", error: hash_including( exception_class: 'StandardError', @@ -655,34 +656,107 @@ RSpec.describe JiraService do it 'calls the api with jira_issue_transition_id' do close_issue - expect(WebMock).to have_requested(:post, @transitions_url).with( - body: /999/ + expect(WebMock).to have_requested(:post, transitions_url).with( + body: /"id":"999"/ ).once end - context 'when have multiple transition ids' do - it 'calls the api with transition ids separated by comma' do - allow(@jira_service).to receive_messages(jira_issue_transition_id: '1,2,3') + context 'when using 
automatic issue transitions' do + let(:transitions) do + [ + { id: '1' }, + { id: '2', to: { statusCategory: { key: 'new' } } }, + { id: '3', to: { statusCategory: { key: 'done' } } }, + { id: '4', to: { statusCategory: { key: 'done' } } } + ] + end + + before do + allow(jira_service).to receive_messages(jira_issue_transition_id: '') close_issue + end + + it 'uses the next transition with a status category of done' do + expect(WebMock).to have_requested(:post, transitions_url).with( + body: /"id":"3"/ + ).once + end + + context 'when no done transition is available' do + let(:transitions) do + [ + { id: '1', to: { statusCategory: { key: 'new' } } } + ] + end + + it 'does not attempt to transition' do + expect(WebMock).not_to have_requested(:post, transitions_url) + end + end + + context 'when no valid transitions are returned' do + let(:transitions) { 'foo' } + + it 'does not attempt to transition' do + expect(WebMock).not_to have_requested(:post, transitions_url) + end + end + end + + context 'when using multiple transition ids' do + before do + allow(jira_service).to receive_messages(jira_issue_transition_id: '1,2,3') + end + + it 'calls the api with transition ids separated by comma' do + close_issue 1.upto(3) do |transition_id| - expect(WebMock).to have_requested(:post, @transitions_url).with( - body: /#{transition_id}/ + expect(WebMock).to have_requested(:post, transitions_url).with( + body: /"id":"#{transition_id}"/ ).once end + + expect(WebMock).to have_requested(:post, comment_url) end it 'calls the api with transition ids separated by semicolon' do - allow(@jira_service).to receive_messages(jira_issue_transition_id: '1;2;3') + allow(jira_service).to receive_messages(jira_issue_transition_id: '1;2;3') close_issue 1.upto(3) do |transition_id| - expect(WebMock).to have_requested(:post, @transitions_url).with( - body: /#{transition_id}/ + expect(WebMock).to have_requested(:post, transitions_url).with( + body: /"id":"#{transition_id}"/ ).once end + + 
expect(WebMock).to have_requested(:post, comment_url) + end + + context 'when a transition fails' do + before do + WebMock.stub_request(:post, transitions_url).with(basic_auth: %w(jira-username jira-password)).to_return do |request| + { status: request.body.include?('"id":"2"') ? 500 : 200 } + end + end + + it 'stops the sequence' do + close_issue + + 1.upto(2) do |transition_id| + expect(WebMock).to have_requested(:post, transitions_url).with( + body: /"id":"#{transition_id}"/ + ) + end + + expect(WebMock).not_to have_requested(:post, transitions_url).with( + body: /"id":"3"/ + ) + + expect(WebMock).not_to have_requested(:post, comment_url) + end end end end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index fd7975bf65d..c33c6c05c24 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Project, factory_default: :keep do include ExternalAuthorizationServiceHelpers using RSpec::Parameterized::TableSyntax - let_it_be(:namespace) { create_default(:namespace) } + let_it_be(:namespace) { create_default(:namespace).freeze } it_behaves_like 'having unique enum values' @@ -1799,7 +1799,8 @@ RSpec.describe Project, factory_default: :keep do describe '#default_branch_protected?' do using RSpec::Parameterized::TableSyntax - let_it_be(:project) { create(:project) } + let_it_be(:namespace) { create(:namespace) } + let_it_be(:project) { create(:project, namespace: namespace) } subject { project.default_branch_protected? } @@ -2802,7 +2803,8 @@ RSpec.describe Project, factory_default: :keep do end describe '#emails_disabled?' do - let(:project) { build(:project, emails_disabled: false) } + let_it_be(:namespace) { create(:namespace) } + let(:project) { build(:project, namespace: namespace, emails_disabled: false) } context 'emails disabled in group' do it 'returns true' do @@ -2830,7 +2832,8 @@ RSpec.describe Project, factory_default: :keep do end describe '#lfs_enabled?' 
do - let(:project) { build(:project) } + let(:namespace) { create(:namespace) } + let(:project) { build(:project, namespace: namespace) } shared_examples 'project overrides group' do it 'returns true when enabled in project' do @@ -4877,7 +4880,8 @@ RSpec.describe Project, factory_default: :keep do end context 'branch protection' do - let(:project) { create(:project, :repository) } + let_it_be(:namespace) { create(:namespace) } + let(:project) { create(:project, :repository, namespace: namespace) } before do create(:import_state, :started, project: project) diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 3a4de7ba279..84347ec2a51 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -1949,8 +1949,8 @@ RSpec.describe Repository do :root_ref, :merged_branch_names, :has_visible_content?, - :issue_template_names_by_category, - :merge_request_template_names_by_category, + :issue_template_names_hash, + :merge_request_template_names_hash, :user_defined_metrics_dashboard_paths, :xcode_project?, :has_ambiguous_refs? 
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb index cdbc1feefce..11196f06529 100644 --- a/spec/models/snippet_repository_spec.rb +++ b/spec/models/snippet_repository_spec.rb @@ -286,6 +286,7 @@ RSpec.describe SnippetRepository do context 'with git errors' do it_behaves_like 'snippet repository with git errors', 'invalid://path/here', described_class::InvalidPathError + it_behaves_like 'snippet repository with git errors', '.git/hooks/pre-commit', described_class::InvalidPathError it_behaves_like 'snippet repository with git errors', '../../path/traversal/here', described_class::InvalidPathError it_behaves_like 'snippet repository with git errors', 'README', described_class::CommitError diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb index 623767d19e0..c194a98fdd8 100644 --- a/spec/models/snippet_spec.rb +++ b/spec/models/snippet_spec.rb @@ -496,6 +496,16 @@ RSpec.describe Snippet do it 'returns array of blobs' do expect(snippet.blobs).to all(be_a(Blob)) end + + context 'when file does not exist' do + it 'removes nil values from the blobs array' do + allow(snippet).to receive(:list_files).and_return(%w(LICENSE non_existent_snippet_file)) + + blobs = snippet.blobs + expect(blobs.count).to eq 1 + expect(blobs.first.name).to eq 'LICENSE' + end + end end end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 860c015e166..084cfb55de0 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -101,6 +101,7 @@ RSpec.describe User do it { is_expected.to have_many(:reviews).inverse_of(:author) } it { is_expected.to have_many(:merge_request_assignees).inverse_of(:assignee) } it { is_expected.to have_many(:merge_request_reviewers).inverse_of(:reviewer) } + it { is_expected.to have_many(:created_custom_emoji).inverse_of(:creator) } describe "#user_detail" do it 'does not persist `user_detail` by default' do diff --git a/spec/policies/project_policy_spec.rb 
b/spec/policies/project_policy_spec.rb index 6ba3ab6aace..cf5a606bd8e 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -1263,4 +1263,90 @@ RSpec.describe ProjectPolicy do end end end + + describe 'access_security_and_compliance' do + context 'when the "Security & Compliance" is enabled' do + before do + project.project_feature.update!(security_and_compliance_access_level: Featurable::PRIVATE) + end + + %w[owner maintainer developer].each do |role| + context "when the role is #{role}" do + let(:current_user) { public_send(role) } + + it { is_expected.to be_allowed(:access_security_and_compliance) } + end + end + + context 'with admin' do + let(:current_user) { admin } + + context 'when admin mode enabled', :enable_admin_mode do + it { is_expected.to be_allowed(:access_security_and_compliance) } + end + + context 'when admin mode disabled' do + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + end + + %w[reporter guest].each do |role| + context "when the role is #{role}" do + let(:current_user) { public_send(role) } + + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + end + + context 'with non member' do + let(:current_user) { non_member } + + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + + context 'with anonymous' do + let(:current_user) { anonymous } + + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + end + + context 'when the "Security & Compliance" is not enabled' do + before do + project.project_feature.update!(security_and_compliance_access_level: Featurable::DISABLED) + end + + %w[owner maintainer developer reporter guest].each do |role| + context "when the role is #{role}" do + let(:current_user) { public_send(role) } + + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + end + + context 'with admin' do + let(:current_user) { admin } + + context 'when admin mode enabled', 
:enable_admin_mode do + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + + context 'when admin mode disabled' do + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + end + + context 'with non member' do + let(:current_user) { non_member } + + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + + context 'with anonymous' do + let(:current_user) { anonymous } + + it { is_expected.to be_disallowed(:access_security_and_compliance) } + end + end + end end diff --git a/spec/presenters/packages/composer/packages_presenter_spec.rb b/spec/presenters/packages/composer/packages_presenter_spec.rb index 19d99a62468..0c2631f2b01 100644 --- a/spec/presenters/packages/composer/packages_presenter_spec.rb +++ b/spec/presenters/packages/composer/packages_presenter_spec.rb @@ -67,10 +67,14 @@ RSpec.describe ::Packages::Composer::PackagesPresenter do { 'packages' => [], 'provider-includes' => { 'p/%hash%.json' => { 'sha256' => /^\h+$/ } }, - 'providers-url' => "/api/v4/group/#{group.id}/-/packages/composer/%package%$%hash%.json" + 'providers-url' => "prefix/api/v4/group/#{group.id}/-/packages/composer/%package%$%hash%.json" } end + before do + stub_config(gitlab: { relative_url_root: 'prefix' }) + end + it 'returns the provider json' do expect(subject).to match(expected_json) end diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb index 98bcbd8384b..f78aa61529a 100644 --- a/spec/presenters/project_presenter_spec.rb +++ b/spec/presenters/project_presenter_spec.rb @@ -183,6 +183,14 @@ RSpec.describe ProjectPresenter do context 'not empty repo' do let(:project) { create(:project, :repository) } + context 'if no current user' do + let(:user) { nil } + + it 'returns false' do + expect(presenter.can_current_user_push_code?).to be(false) + end + end + it 'returns true if user can push to default branch' do project.add_developer(user) @@ -350,7 +358,7 @@ RSpec.describe 
ProjectPresenter do is_link: false, label: a_string_including("New file"), link: presenter.project_new_blob_path(project, 'master'), - class_modifier: 'dashed' + class_modifier: 'btn-dashed' ) end @@ -597,10 +605,12 @@ RSpec.describe ProjectPresenter do context 'for a developer' do before do project.add_developer(user) + stub_experiments(empty_repo_upload: :candidate) end it 'orders the items correctly' do expect(empty_repo_statistics_buttons.map(&:label)).to start_with( + a_string_including('Upload'), a_string_including('New'), a_string_including('README'), a_string_including('LICENSE'), @@ -609,6 +619,16 @@ RSpec.describe ProjectPresenter do a_string_including('CI/CD') ) end + + context 'when not in the upload experiment' do + before do + stub_experiments(empty_repo_upload: :control) + end + + it 'does not include upload button' do + expect(empty_repo_statistics_buttons.map(&:label)).not_to start_with(a_string_including('Upload')) + end + end end end @@ -694,4 +714,20 @@ RSpec.describe ProjectPresenter do end end end + + describe 'empty_repo_upload_experiment?' do + subject { presenter.empty_repo_upload_experiment? 
} + + it 'returns false when upload_anchor_data is nil' do + allow(presenter).to receive(:upload_anchor_data).and_return(nil) + + expect(subject).to be false + end + + it 'returns true when upload_anchor_data exists' do + allow(presenter).to receive(:upload_anchor_data).and_return(true) + + expect(subject).to be true + end + end end diff --git a/spec/presenters/snippet_presenter_spec.rb b/spec/presenters/snippet_presenter_spec.rb index a1d987ed78f..b0387206bd9 100644 --- a/spec/presenters/snippet_presenter_spec.rb +++ b/spec/presenters/snippet_presenter_spec.rb @@ -159,7 +159,7 @@ RSpec.describe SnippetPresenter do let(:snippet) { create(:snippet, :repository, author: user) } it 'returns repository first blob' do - expect(subject).to eq snippet.blobs.first + expect(subject.name).to eq snippet.blobs.first.name end end end diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb index 74d8e3f7ae8..3cc27d0e1eb 100644 --- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb +++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb @@ -198,7 +198,12 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do 'when' => 'on_success' }] end - let(:expected_features) { { 'trace_sections' => true } } + let(:expected_features) do + { + 'trace_sections' => true, + 'failure_reasons' => include('script_failure') + } + end it 'picks a job' do request_job info: { platform: :darwin } @@ -220,7 +225,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do expect(json_response['artifacts']).to eq(expected_artifacts) expect(json_response['cache']).to eq(expected_cache) expect(json_response['variables']).to include(*expected_variables) - expect(json_response['features']).to eq(expected_features) + expect(json_response['features']).to match(expected_features) end it 'creates persistent ref' do diff --git a/spec/requests/api/graphql/instance_statistics_measurements_spec.rb 
b/spec/requests/api/graphql/usage_trends_measurements_spec.rb index eb73dc59253..69a3ed7e09c 100644 --- a/spec/requests/api/graphql/instance_statistics_measurements_spec.rb +++ b/spec/requests/api/graphql/usage_trends_measurements_spec.rb @@ -2,22 +2,22 @@ require 'spec_helper' -RSpec.describe 'InstanceStatisticsMeasurements' do +RSpec.describe 'UsageTrendsMeasurements' do include GraphqlHelpers let(:current_user) { create(:user, :admin) } - let!(:instance_statistics_measurement_1) { create(:instance_statistics_measurement, :project_count, recorded_at: 20.days.ago, count: 5) } - let!(:instance_statistics_measurement_2) { create(:instance_statistics_measurement, :project_count, recorded_at: 10.days.ago, count: 10) } + let!(:usage_trends_measurement_1) { create(:usage_trends_measurement, :project_count, recorded_at: 20.days.ago, count: 5) } + let!(:usage_trends_measurement_2) { create(:usage_trends_measurement, :project_count, recorded_at: 10.days.ago, count: 10) } let(:arguments) { 'identifier: PROJECTS' } - let(:query) { graphql_query_for(:instanceStatisticsMeasurements, arguments, 'nodes { count identifier }') } + let(:query) { graphql_query_for(:UsageTrendsMeasurements, arguments, 'nodes { count identifier }') } before do post_graphql(query, current_user: current_user) end it 'returns measurement objects' do - expect(graphql_data.dig('instanceStatisticsMeasurements', 'nodes')).to eq([ + expect(graphql_data.dig('usageTrendsMeasurements', 'nodes')).to eq([ { "count" => 10, 'identifier' => 'PROJECTS' }, { "count" => 5, 'identifier' => 'PROJECTS' } ]) @@ -27,7 +27,7 @@ RSpec.describe 'InstanceStatisticsMeasurements' do let(:arguments) { %(identifier: PROJECTS, recordedAfter: "#{15.days.ago.to_date}", recordedBefore: "#{5.days.ago.to_date}") } it 'returns filtered measurement objects' do - expect(graphql_data.dig('instanceStatisticsMeasurements', 'nodes')).to eq([ + expect(graphql_data.dig('usageTrendsMeasurements', 'nodes')).to eq([ { "count" => 10, 'identifier' => 
'PROJECTS' } ]) end diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb index 2316e702c3e..b5bf697e9e3 100644 --- a/spec/requests/api/lint_spec.rb +++ b/spec/requests/api/lint_spec.rb @@ -5,7 +5,9 @@ require 'spec_helper' RSpec.describe API::Lint do describe 'POST /ci/lint' do context 'when signup settings are disabled' do - Gitlab::CurrentSettings.signup_enabled = false + before do + Gitlab::CurrentSettings.signup_enabled = false + end context 'when unauthenticated' do it 'returns authentication error' do @@ -16,22 +18,25 @@ RSpec.describe API::Lint do end context 'when authenticated' do - it 'returns unauthorized error' do - post api('/ci/lint'), params: { content: 'content' } + let_it_be(:api_user) { create(:user) } + it 'returns authorized' do + post api('/ci/lint', api_user), params: { content: 'content' } - expect(response).to have_gitlab_http_status(:unauthorized) + expect(response).to have_gitlab_http_status(:ok) end end end context 'when signup settings are enabled' do - Gitlab::CurrentSettings.signup_enabled = true + before do + Gitlab::CurrentSettings.signup_enabled = true + end context 'when unauthenticated' do - it 'returns authentication error' do + it 'returns authorized success' do post api('/ci/lint'), params: { content: 'content' } - expect(response).to have_gitlab_http_status(:unauthorized) + expect(response).to have_gitlab_http_status(:ok) end end diff --git a/spec/requests/api/npm_instance_packages_spec.rb b/spec/requests/api/npm_instance_packages_spec.rb index 70c76067a6e..698885ddcf4 100644 --- a/spec/requests/api/npm_instance_packages_spec.rb +++ b/spec/requests/api/npm_instance_packages_spec.rb @@ -3,6 +3,11 @@ require 'spec_helper' RSpec.describe API::NpmInstancePackages do + # We need to create a subgroup with the same name as the hosting group. 
+ # It has to be created first to exhibit this bug: https://gitlab.com/gitlab-org/gitlab/-/issues/321958 + let_it_be(:another_namespace) { create(:group, :public) } + let_it_be(:similarly_named_group) { create(:group, :public, parent: another_namespace, name: 'test-group') } + include_context 'npm api setup' describe 'GET /api/v4/packages/npm/*package_name' do diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb index 7ea238c0607..e64b5ddc374 100644 --- a/spec/requests/api/npm_project_packages_spec.rb +++ b/spec/requests/api/npm_project_packages_spec.rb @@ -30,7 +30,7 @@ RSpec.describe API::NpmProjectPackages do end describe 'GET /api/v4/projects/:id/packages/npm/*package_name/-/*file_name' do - let_it_be(:package_file) { package.package_files.first } + let(:package_file) { package.package_files.first } let(:headers) { {} } let(:url) { api("/projects/#{project.id}/packages/npm/#{package.name}/-/#{package_file.file_name}") } @@ -127,24 +127,6 @@ RSpec.describe API::NpmProjectPackages do context 'when params are correct' do context 'invalid package record' do - context 'unscoped package' do - let(:package_name) { 'my_unscoped_package' } - let(:params) { upload_params(package_name: package_name) } - - it_behaves_like 'handling invalid record with 400 error' - - context 'with empty versions' do - let(:params) { upload_params(package_name: package_name).merge!(versions: {}) } - - it 'throws a 400 error' do - expect { upload_package_with_token(package_name, params) } - .not_to change { project.packages.count } - - expect(response).to have_gitlab_http_status(:bad_request) - end - end - end - context 'invalid package name' do let(:package_name) { "@#{group.path}/my_inv@@lid_package_name" } let(:params) { upload_params(package_name: package_name) } @@ -175,52 +157,71 @@ RSpec.describe API::NpmProjectPackages do end end - context 'scoped package' do - let(:package_name) { "@#{group.path}/my_package_name" } + context 
'valid package record' do let(:params) { upload_params(package_name: package_name) } - context 'with access token' do - subject { upload_package_with_token(package_name, params) } + shared_examples 'handling upload with different authentications' do + context 'with access token' do + subject { upload_package_with_token(package_name, params) } + + it_behaves_like 'a package tracking event', 'API::NpmPackages', 'push_package' + + it 'creates npm package with file' do + expect { subject } + .to change { project.packages.count }.by(1) + .and change { Packages::PackageFile.count }.by(1) + .and change { Packages::Tag.count }.by(1) - it_behaves_like 'a package tracking event', 'API::NpmPackages', 'push_package' + expect(response).to have_gitlab_http_status(:ok) + end + end - it 'creates npm package with file' do - expect { subject } + it 'creates npm package with file with job token' do + expect { upload_package_with_job_token(package_name, params) } .to change { project.packages.count }.by(1) .and change { Packages::PackageFile.count }.by(1) - .and change { Packages::Tag.count }.by(1) expect(response).to have_gitlab_http_status(:ok) end - end - it 'creates npm package with file with job token' do - expect { upload_package_with_job_token(package_name, params) } - .to change { project.packages.count }.by(1) - .and change { Packages::PackageFile.count }.by(1) + context 'with an authenticated job token' do + let!(:job) { create(:ci_build, user: user) } - expect(response).to have_gitlab_http_status(:ok) - end + before do + Grape::Endpoint.before_each do |endpoint| + expect(endpoint).to receive(:current_authenticated_job) { job } + end + end - context 'with an authenticated job token' do - let!(:job) { create(:ci_build, user: user) } + after do + Grape::Endpoint.before_each nil + end - before do - Grape::Endpoint.before_each do |endpoint| - expect(endpoint).to receive(:current_authenticated_job) { job } + it 'creates the package metadata' do + 
upload_package_with_token(package_name, params) + + expect(response).to have_gitlab_http_status(:ok) + expect(project.reload.packages.find(json_response['id']).original_build_info.pipeline).to eq job.pipeline end end + end - after do - Grape::Endpoint.before_each nil - end + context 'with a scoped name' do + let(:package_name) { "@#{group.path}/my_package_name" } - it 'creates the package metadata' do - upload_package_with_token(package_name, params) + it_behaves_like 'handling upload with different authentications' + end - expect(response).to have_gitlab_http_status(:ok) - expect(project.reload.packages.find(json_response['id']).original_build_info.pipeline).to eq job.pipeline - end + context 'with any scoped name' do + let(:package_name) { "@any_scope/my_package_name" } + + it_behaves_like 'handling upload with different authentications' + end + + context 'with an unscoped name' do + let(:package_name) { "my_unscoped_package_name" } + + it_behaves_like 'handling upload with different authentications' end end diff --git a/spec/requests/api/rubygem_packages_spec.rb b/spec/requests/api/rubygem_packages_spec.rb index 5dd68bf9b10..c97072ee9cd 100644 --- a/spec/requests/api/rubygem_packages_spec.rb +++ b/spec/requests/api/rubygem_packages_spec.rb @@ -3,9 +3,11 @@ require 'spec_helper' RSpec.describe API::RubygemPackages do + include PackagesManagerApiSpecHelpers + include WorkhorseHelpers using RSpec::Parameterized::TableSyntax - let_it_be(:project) { create(:project) } + let_it_be_with_reload(:project) { create(:project) } let_it_be(:personal_access_token) { create(:personal_access_token) } let_it_be(:user) { personal_access_token.user } let_it_be(:job) { create(:ci_build, :running, user: user) } @@ -13,6 +15,14 @@ RSpec.describe API::RubygemPackages do let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) } let_it_be(:headers) { {} } + let(:tokens) do + { + personal_access_token: personal_access_token.token, + 
deploy_token: deploy_token.token, + job_token: job.token + } + end + shared_examples 'when feature flag is disabled' do let(:headers) do { 'HTTP_AUTHORIZATION' => personal_access_token.token } @@ -42,14 +52,6 @@ RSpec.describe API::RubygemPackages do { 'HTTP_AUTHORIZATION' => token } end - let(:tokens) do - { - personal_access_token: personal_access_token.token, - deploy_token: deploy_token.token, - job_token: job.token - } - end - where(:user_role, :token_type, :valid_token, :status) do :guest | :personal_access_token | true | :not_found :guest | :personal_access_token | false | :unauthorized @@ -114,19 +116,163 @@ RSpec.describe API::RubygemPackages do end describe 'POST /api/v4/projects/:project_id/packages/rubygems/api/v1/gems/authorize' do + include_context 'workhorse headers' + let(:url) { api("/projects/#{project.id}/packages/rubygems/api/v1/gems/authorize") } + let(:headers) { {} } subject { post(url, headers: headers) } - it_behaves_like 'an unimplemented route' + context 'with valid project' do + where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do + :public | :developer | true | :personal_access_token | true | 'process rubygems workhorse authorization' | :success + :public | :guest | true | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :developer | false | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :public | :guest | false | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | false | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | false | :personal_access_token | false | 'rejects 
rubygems packages access' | :unauthorized + :public | :anonymous | false | :personal_access_token | true | 'rejects rubygems packages access' | :unauthorized + :private | :developer | true | :personal_access_token | true | 'process rubygems workhorse authorization' | :success + :private | :guest | true | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :private | :developer | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | false | :personal_access_token | true | 'rejects rubygems packages access' | :not_found + :private | :guest | false | :personal_access_token | true | 'rejects rubygems packages access' | :not_found + :private | :developer | false | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | false | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :anonymous | false | :personal_access_token | true | 'rejects rubygems packages access' | :unauthorized + :public | :developer | true | :job_token | true | 'process rubygems workhorse authorization' | :success + :public | :guest | true | :job_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :developer | false | :job_token | true | 'rejects rubygems packages access' | :forbidden + :public | :guest | false | :job_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized 
+ :private | :developer | true | :job_token | true | 'process rubygems workhorse authorization' | :success + :private | :guest | true | :job_token | true | 'rejects rubygems packages access' | :forbidden + :private | :developer | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | false | :job_token | true | 'rejects rubygems packages access' | :not_found + :private | :guest | false | :job_token | true | 'rejects rubygems packages access' | :not_found + :private | :developer | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :developer | true | :deploy_token | true | 'process rubygems workhorse authorization' | :success + :public | :developer | true | :deploy_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | true | :deploy_token | true | 'process rubygems workhorse authorization' | :success + :private | :developer | true | :deploy_token | false | 'rejects rubygems packages access' | :unauthorized + end + + with_them do + let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' } + let(:user_headers) { user_role == :anonymous ? 
{} : { 'HTTP_AUTHORIZATION' => token } } + let(:headers) { user_headers.merge(workhorse_headers) } + + before do + project.update!(visibility: visibility.to_s) + end + + it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member] + end + end end describe 'POST /api/v4/projects/:project_id/packages/rubygems/api/v1/gems' do - let(:url) { api("/projects/#{project.id}/packages/rubygems/api/v1/gems") } + include_context 'workhorse headers' + + let(:url) { "/projects/#{project.id}/packages/rubygems/api/v1/gems" } + + let_it_be(:file_name) { 'package.gem' } + let(:headers) { {} } + let(:params) { { file: temp_file(file_name) } } + let(:file_key) { :file } + let(:send_rewritten_field) { true } + + subject do + workhorse_finalize( + api(url), + method: :post, + file_key: file_key, + params: params, + headers: headers, + send_rewritten_field: send_rewritten_field + ) + end - subject { post(url, headers: headers) } + context 'with valid project' do + where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do + :public | :developer | true | :personal_access_token | true | 'process rubygems upload' | :created + :public | :guest | true | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :developer | false | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :public | :guest | false | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | false | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | false | :personal_access_token | false | 'rejects rubygems packages access' | 
:unauthorized + :public | :anonymous | false | :personal_access_token | true | 'rejects rubygems packages access' | :unauthorized + :private | :developer | true | :personal_access_token | true | 'process rubygems upload' | :created + :private | :guest | true | :personal_access_token | true | 'rejects rubygems packages access' | :forbidden + :private | :developer | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | true | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | false | :personal_access_token | true | 'rejects rubygems packages access' | :not_found + :private | :guest | false | :personal_access_token | true | 'rejects rubygems packages access' | :not_found + :private | :developer | false | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | false | :personal_access_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :anonymous | false | :personal_access_token | true | 'rejects rubygems packages access' | :unauthorized + :public | :developer | true | :job_token | true | 'process rubygems upload' | :created + :public | :guest | true | :job_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :developer | false | :job_token | true | 'rejects rubygems packages access' | :forbidden + :public | :guest | false | :job_token | true | 'rejects rubygems packages access' | :forbidden + :public | :developer | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :guest | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | true | :job_token | true | 'process 
rubygems upload' | :created + :private | :guest | true | :job_token | true | 'rejects rubygems packages access' | :forbidden + :private | :developer | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | true | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | false | :job_token | true | 'rejects rubygems packages access' | :not_found + :private | :guest | false | :job_token | true | 'rejects rubygems packages access' | :not_found + :private | :developer | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :guest | false | :job_token | false | 'rejects rubygems packages access' | :unauthorized + :public | :developer | true | :deploy_token | true | 'process rubygems upload' | :created + :public | :developer | true | :deploy_token | false | 'rejects rubygems packages access' | :unauthorized + :private | :developer | true | :deploy_token | true | 'process rubygems upload' | :created + :private | :developer | true | :deploy_token | false | 'rejects rubygems packages access' | :unauthorized + end - it_behaves_like 'an unimplemented route' + with_them do + let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' } + let(:user_headers) { user_role == :anonymous ? 
{} : { 'HTTP_AUTHORIZATION' => token } } + let(:headers) { user_headers.merge(workhorse_headers) } + + before do + project.update!(visibility: visibility.to_s) + end + + it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member] + end + + context 'failed package file save' do + let(:user_headers) { { 'HTTP_AUTHORIZATION' => personal_access_token.token } } + let(:headers) { user_headers.merge(workhorse_headers) } + + before do + project.add_developer(user) + end + + it 'does not create package record', :aggregate_failures do + allow(Packages::CreatePackageFileService).to receive(:new).and_raise(StandardError) + + expect { subject } + .to change { project.packages.count }.by(0) + .and change { Packages::PackageFile.count }.by(0) + expect(response).to have_gitlab_http_status(:error) + end + end + end end describe 'GET /api/v4/projects/:project_id/packages/rubygems/api/v1/dependencies' do diff --git a/spec/requests/projects/noteable_notes_spec.rb b/spec/requests/projects/noteable_notes_spec.rb index 5ae2aadaa84..2bf1ffb2edc 100644 --- a/spec/requests/projects/noteable_notes_spec.rb +++ b/spec/requests/projects/noteable_notes_spec.rb @@ -18,7 +18,9 @@ RSpec.describe 'Project noteable notes' do login_as(user) end - it 'does not set a Gitlab::EtagCaching ETag' do + it 'does not set a Gitlab::EtagCaching ETag if there is a note' do + create(:note_on_merge_request, noteable: merge_request, project: merge_request.project) + get notes_path expect(response).to have_gitlab_http_status(:ok) @@ -27,5 +29,12 @@ RSpec.describe 'Project noteable notes' do # interfere with notes pagination expect(response_etag).not_to eq(stored_etag) end + + it 'sets a Gitlab::EtagCaching ETag if there is no note' do + get notes_path + + expect(response).to have_gitlab_http_status(:ok) + expect(response_etag).to eq(stored_etag) + end end end diff --git a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb 
b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb index 71311b9df7f..526a362447f 100644 --- a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb +++ b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../rubocop/cop/avoid_return_from_blocks' RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks do - include CopHelper - subject(:cop) { described_class.new } it 'flags violation for return inside a block' do @@ -19,20 +17,16 @@ RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks do RUBY end - it "doesn't call add_offense twice for nested blocks" do - source = <<~RUBY + it "doesn't create more than one offense for nested blocks" do + expect_offense(<<~RUBY) call do call do something return if something_else + ^^^^^^ Do not return from a block, use next or break instead. end end RUBY - expect_any_instance_of(described_class) do |instance| - expect(instance).to receive(:add_offense).once - end - - inspect_source(source) end it 'flags violation for return inside included > def > block' do diff --git a/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb b/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb index 9e13a5278e3..c049528523e 100644 --- a/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb +++ b/spec/rubocop/cop/avoid_route_redirect_leading_slash_spec.rb @@ -5,19 +5,21 @@ require 'rubocop' require_relative '../../../rubocop/cop/avoid_route_redirect_leading_slash' RSpec.describe RuboCop::Cop::AvoidRouteRedirectLeadingSlash do - include CopHelper - subject(:cop) { described_class.new } before do allow(cop).to receive(:in_routes?).and_return(true) end - it 'registers an offense when redirect has a leading slash' do + it 'registers an offense when redirect has a leading slash and corrects', :aggregate_failures do expect_offense(<<~PATTERN) root to: redirect("/-/route") ^^^^^^^^^^^^^^^^^^^^ Do not use a leading "/" in route redirects PATTERN + + expect_correction(<<~PATTERN) + root to: 
redirect("-/route") + PATTERN end it 'does not register an offense when redirect does not have a leading slash' do @@ -25,8 +27,4 @@ RSpec.describe RuboCop::Cop::AvoidRouteRedirectLeadingSlash do root to: redirect("-/route") PATTERN end - - it 'autocorrect `/-/route` to `-/route`' do - expect(autocorrect_source('redirect("/-/route")')).to eq('redirect("-/route")') - end end diff --git a/spec/rubocop/cop/ignored_columns_spec.rb b/spec/rubocop/cop/ignored_columns_spec.rb index 38b4ac0bc1a..37fec7cb62a 100644 --- a/spec/rubocop/cop/ignored_columns_spec.rb +++ b/spec/rubocop/cop/ignored_columns_spec.rb @@ -2,21 +2,17 @@ require 'fast_spec_helper' require 'rubocop' -require 'rubocop/rspec/support' require_relative '../../../rubocop/cop/ignored_columns' RSpec.describe RuboCop::Cop::IgnoredColumns do - include CopHelper - subject(:cop) { described_class.new } - it 'flags the use of destroy_all with a local variable receiver' do - inspect_source(<<~RUBY) + it 'flags direct use of ignored_columns instead of the IgnoredColumns concern' do + expect_offense(<<~RUBY) class Foo < ApplicationRecord self.ignored_columns += %i[id] + ^^^^^^^^^^^^^^^^^^^^ Use `IgnoredColumns` concern instead of adding to `self.ignored_columns`. 
end RUBY - - expect(cop.offenses.size).to eq(1) end end diff --git a/spec/rubocop/cop/migration/add_column_with_default_spec.rb b/spec/rubocop/cop/migration/add_column_with_default_spec.rb index cf476ae55d6..68f219e01f9 100644 --- a/spec/rubocop/cop/migration/add_column_with_default_spec.rb +++ b/spec/rubocop/cop/migration/add_column_with_default_spec.rb @@ -5,11 +5,9 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_column_with_default' RSpec.describe RuboCop::Cop::Migration::AddColumnWithDefault do - include CopHelper - let(:cop) { described_class.new } - context 'outside of a migration' do + context 'when outside of a migration' do it 'does not register any offenses' do expect_no_offenses(<<~RUBY) def up @@ -19,18 +17,16 @@ RSpec.describe RuboCop::Cop::Migration::AddColumnWithDefault do end end - context 'in a migration' do + context 'when in a migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end - let(:offense) { '`add_column_with_default` is deprecated, use `add_column` instead' } - it 'registers an offense' do expect_offense(<<~RUBY) def up add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true, allow_null: false) - ^^^^^^^^^^^^^^^^^^^^^^^ #{offense} + ^^^^^^^^^^^^^^^^^^^^^^^ `add_column_with_default` is deprecated, use `add_column` instead end RUBY end diff --git a/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb b/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb index 92863c45b1a..70d9a577728 100644 --- a/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb +++ b/spec/rubocop/cop/migration/add_columns_to_wide_tables_spec.rb @@ -5,11 +5,9 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_columns_to_wide_tables' RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables do - include CopHelper - let(:cop) { described_class.new } - context 'outside of a migration' do + context 'when outside of a migration' do 
it 'does not register any offenses' do expect_no_offenses(<<~RUBY) def up @@ -19,14 +17,14 @@ RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables do end end - context 'in a migration' do + context 'when in a migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end context 'with wide tables' do it 'registers an offense when adding a column to a wide table' do - offense = '`projects` is a wide table with several columns, addig more should be avoided unless absolutely necessary. Consider storing the column in a different table or creating a new one.' + offense = '`projects` is a wide table with several columns, [...]' expect_offense(<<~RUBY) def up @@ -37,7 +35,7 @@ RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables do end it 'registers an offense when adding a column with default to a wide table' do - offense = '`users` is a wide table with several columns, addig more should be avoided unless absolutely necessary. Consider storing the column in a different table or creating a new one.' + offense = '`users` is a wide table with several columns, [...]' expect_offense(<<~RUBY) def up @@ -48,7 +46,7 @@ RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables do end it 'registers an offense when adding a reference' do - offense = '`ci_builds` is a wide table with several columns, addig more should be avoided unless absolutely necessary. Consider storing the column in a different table or creating a new one.' + offense = '`ci_builds` is a wide table with several columns, [...]' expect_offense(<<~RUBY) def up @@ -59,7 +57,7 @@ RSpec.describe RuboCop::Cop::Migration::AddColumnsToWideTables do end it 'registers an offense when adding timestamps' do - offense = '`projects` is a wide table with several columns, addig more should be avoided unless absolutely necessary. Consider storing the column in a different table or creating a new one.' 
+ offense = '`projects` is a wide table with several columns, [...]' expect_offense(<<~RUBY) def up diff --git a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb index 25350ad1ecb..2da29606024 100644 --- a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb +++ b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb @@ -5,46 +5,38 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_concurrent_foreign_key' RSpec.describe RuboCop::Cop::Migration::AddConcurrentForeignKey do - include CopHelper - let(:cop) { described_class.new } - context 'outside of a migration' do + context 'when outside of a migration' do it 'does not register any offenses' do - inspect_source('def up; add_foreign_key(:projects, :users, column: :user_id); end') - - expect(cop.offenses).to be_empty + expect_no_offenses('def up; add_foreign_key(:projects, :users, column: :user_id); end') end end - context 'in a migration' do + context 'when in a migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when using add_foreign_key' do - inspect_source('def up; add_foreign_key(:projects, :users, column: :user_id); end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def up + add_foreign_key(:projects, :users, column: :user_id) + ^^^^^^^^^^^^^^^ `add_foreign_key` requires downtime, use `add_concurrent_foreign_key` instead + end + RUBY end it 'does not register an offense when a `NOT VALID` foreign key is added' do - inspect_source('def up; add_foreign_key(:projects, :users, column: :user_id, validate: false); end') - - expect(cop.offenses).to be_empty + expect_no_offenses('def up; add_foreign_key(:projects, :users, column: :user_id, validate: false); end') end it 'does not register an offense when `add_foreign_key` is within 
`with_lock_retries`' do - inspect_source <<~RUBY + expect_no_offenses(<<~RUBY) with_lock_retries do add_foreign_key :key, :projects, column: :project_id, on_delete: :cascade end RUBY - - expect(cop.offenses).to be_empty end end end diff --git a/spec/rubocop/cop/migration/add_concurrent_index_spec.rb b/spec/rubocop/cop/migration/add_concurrent_index_spec.rb index 351283a230a..bc5fd2de4fd 100644 --- a/spec/rubocop/cop/migration/add_concurrent_index_spec.rb +++ b/spec/rubocop/cop/migration/add_concurrent_index_spec.rb @@ -5,36 +5,30 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_concurrent_index' RSpec.describe RuboCop::Cop::Migration::AddConcurrentIndex do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when add_concurrent_index is used inside a change method' do - inspect_source('def change; add_concurrent_index :table, :column; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + ^^^^^^ `add_concurrent_index` is not reversible[...] 
+ add_concurrent_index :table, :column + end + RUBY end it 'registers no offense when add_concurrent_index is used inside an up method' do - inspect_source('def up; add_concurrent_index :table, :column; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses('def up; add_concurrent_index :table, :column; end') end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do - inspect_source('def change; add_concurrent_index :table, :column; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses('def change; add_concurrent_index :table, :column; end') end end end diff --git a/spec/rubocop/cop/migration/add_index_spec.rb b/spec/rubocop/cop/migration/add_index_spec.rb index 1d083e9f2d2..894fbc25eb8 100644 --- a/spec/rubocop/cop/migration/add_index_spec.rb +++ b/spec/rubocop/cop/migration/add_index_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_index' RSpec.describe RuboCop::Cop::Migration::AddIndex do - include CopHelper - subject(:cop) { described_class.new } context 'in migration' do diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb index 149fb0a48eb..4f107f7600f 100644 --- a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb +++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb @@ -5,11 +5,11 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_limit_to_text_columns' RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do + let(:msg) { 'Text columns should always have a limit set (255 is suggested)[...]' } + before do allow(cop).to receive(:in_migration?).and_return(true) end @@ -25,31 +25,29 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do create_table 
:test_text_limits, id: false do |t| t.integer :test_id, null: false t.text :name - ^^^^ #{described_class::MSG} + ^^^^ #{msg} end create_table_with_constraints :test_text_limits_create do |t| t.integer :test_id, null: false t.text :title t.text :description - ^^^^ #{described_class::MSG} + ^^^^ #{msg} t.text_limit :title, 100 end add_column :test_text_limits, :email, :text - ^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^ #{msg} add_column_with_default :test_text_limits, :role, :text, default: 'default' - ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^ #{msg} change_column_type_concurrently :test_text_limits, :test_id, :text - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg} end end RUBY - - expect(cop.offenses.map(&:cop_name)).to all(eq('Migration/AddLimitToTextColumns')) end end @@ -111,7 +109,7 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do end # Make sure that the cop is properly checking for an `add_text_limit` - # over the same {table, attribute} as the one that triggered the offence + # over the same {table, attribute} as the one that triggered the offense context 'when the limit is defined for a same name attribute but different table' do it 'registers an offense' do expect_offense(<<~RUBY) @@ -123,17 +121,17 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do create_table :test_text_limits, id: false do |t| t.integer :test_id, null: false t.text :name - ^^^^ #{described_class::MSG} + ^^^^ #{msg} end add_column :test_text_limits, :email, :text - ^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^ #{msg} add_column_with_default :test_text_limits, :role, :text, default: 'default' - ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^ #{msg} change_column_type_concurrently :test_text_limits, :test_id, :text - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg} add_text_limit :wrong_table, 
:name, 255 add_text_limit :wrong_table, :email, 255 @@ -142,8 +140,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do end end RUBY - - expect(cop.offenses.map(&:cop_name)).to all(eq('Migration/AddLimitToTextColumns')) end end @@ -176,18 +172,18 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do DOWNTIME = false def up - drop_table :no_offence_on_down + drop_table :no_offense_on_down end def down - create_table :no_offence_on_down, id: false do |t| + create_table :no_offense_on_down, id: false do |t| t.integer :test_id, null: false t.text :name end - add_column :no_offence_on_down, :email, :text + add_column :no_offense_on_down, :email, :text - add_column_with_default :no_offence_on_down, :role, :text, default: 'default' + add_column_with_default :no_offense_on_down, :role, :text, default: 'default' end end RUBY @@ -195,7 +191,7 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do expect_no_offenses(<<~RUBY) class TestTextLimits < ActiveRecord::Migration[6.0] diff --git a/spec/rubocop/cop/migration/add_reference_spec.rb b/spec/rubocop/cop/migration/add_reference_spec.rb index 6e229d3eefc..35386f14e26 100644 --- a/spec/rubocop/cop/migration/add_reference_spec.rb +++ b/spec/rubocop/cop/migration/add_reference_spec.rb @@ -5,11 +5,9 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_reference' RSpec.describe RuboCop::Cop::Migration::AddReference do - include CopHelper - let(:cop) { described_class.new } - context 'outside of a migration' do + context 'when outside of a migration' do it 'does not register any offenses' do expect_no_offenses(<<~RUBY) def up @@ -19,12 +17,12 @@ RSpec.describe RuboCop::Cop::Migration::AddReference do end end - context 'in a migration' do + context 'when in a migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end - 
let(:offense) { '`add_reference` requires downtime for existing tables, use `add_concurrent_foreign_key` instead. When used for new tables, `index: true` or `index: { options... } is required.`' } + let(:offense) { '`add_reference` requires downtime for existing tables, use `add_concurrent_foreign_key`[...]' } context 'when the table existed before' do it 'registers an offense when using add_reference' do diff --git a/spec/rubocop/cop/migration/add_timestamps_spec.rb b/spec/rubocop/cop/migration/add_timestamps_spec.rb index 83570711ab9..6492c93d4e6 100644 --- a/spec/rubocop/cop/migration/add_timestamps_spec.rb +++ b/spec/rubocop/cop/migration/add_timestamps_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/add_timestamps' RSpec.describe RuboCop::Cop::Migration::AddTimestamps do - include CopHelper - subject(:cop) { described_class.new } let(:migration_with_add_timestamps) do @@ -47,44 +45,39 @@ RSpec.describe RuboCop::Cop::Migration::AddTimestamps do ) end - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when the "add_timestamps" method is used' do - inspect_source(migration_with_add_timestamps) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([7]) - end + expect_offense(<<~RUBY) + class Users < ActiveRecord::Migration[4.2] + DOWNTIME = false + + def change + add_column(:users, :username, :text) + add_timestamps(:users) + ^^^^^^^^^^^^^^ Do not use `add_timestamps`, use `add_timestamps_with_timezone` instead + end + end + RUBY end it 'does not register an offense when the "add_timestamps" method is not used' do - inspect_source(migration_without_add_timestamps) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(migration_without_add_timestamps) end it 'does not register an offense when the "add_timestamps_with_timezone" method 
is used' do - inspect_source(migration_with_add_timestamps_with_timezone) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(migration_with_add_timestamps_with_timezone) end end - context 'outside of migration' do - it 'registers no offense' do - inspect_source(migration_with_add_timestamps) - inspect_source(migration_without_add_timestamps) - inspect_source(migration_with_add_timestamps_with_timezone) - - expect(cop.offenses.size).to eq(0) + context 'when outside of migration' do + it 'registers no offense', :aggregate_failures do + expect_no_offenses(migration_with_add_timestamps) + expect_no_offenses(migration_without_add_timestamps) + expect_no_offenses(migration_with_add_timestamps_with_timezone) end end end diff --git a/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb b/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb index 38ccf546b7c..0a95bd5d78c 100644 --- a/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb +++ b/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb @@ -5,11 +5,11 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/complex_indexes_require_name' RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do + let(:msg) { 'indexes added with custom options must be explicitly named' } + before do allow(cop).to receive(:in_migration?).and_return(true) end @@ -29,9 +29,9 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do t.index :column1, unique: true t.index :column2, where: 'column1 = 0' - ^^^^^ #{described_class::MSG} + ^^^^^ #{msg} t.index :column3, using: :gin - ^^^^^ #{described_class::MSG} + ^^^^^ #{msg} end end @@ -40,8 +40,6 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do end end RUBY - - expect(cop.offenses.map(&:cop_name)).to 
all(eq("Migration/#{described_class.name.demodulize}")) end end @@ -85,20 +83,18 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do add_index :test_indexes, :column1 add_index :test_indexes, :column2, where: "column2 = 'value'", order: { column4: :desc } - ^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^ #{msg} end def down add_index :test_indexes, :column4, 'unique' => true, where: 'column4 IS NOT NULL' - ^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^ #{msg} add_concurrent_index :test_indexes, :column6, using: :gin, opclass: :gin_trgm_ops - ^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^ #{msg} end end RUBY - - expect(cop.offenses.map(&:cop_name)).to all(eq("Migration/#{described_class.name.demodulize}")) end end @@ -132,7 +128,7 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do end end - context 'outside migration' do + context 'when outside migration' do before do allow(cop).to receive(:in_migration?).and_return(false) end diff --git a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb index 2159bad1490..d046fd913d4 100644 --- a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb +++ b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/create_table_with_foreign_keys' RSpec.describe RuboCop::Cop::Migration::CreateTableWithForeignKeys do - include CopHelper - let(:cop) { described_class.new } context 'outside of a migration' do @@ -22,7 +20,7 @@ RSpec.describe RuboCop::Cop::Migration::CreateTableWithForeignKeys do end end - context 'in a migration' do + context 'when in a migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end diff --git a/spec/rubocop/cop/migration/datetime_spec.rb b/spec/rubocop/cop/migration/datetime_spec.rb index a3cccae21e0..24be3bea8c3 100644 --- 
a/spec/rubocop/cop/migration/datetime_spec.rb +++ b/spec/rubocop/cop/migration/datetime_spec.rb @@ -5,40 +5,8 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/datetime' RSpec.describe RuboCop::Cop::Migration::Datetime do - include CopHelper - subject(:cop) { described_class.new } - let(:create_table_migration_with_datetime) do - %q( - class Users < ActiveRecord::Migration[6.0] - DOWNTIME = false - - def change - create_table :users do |t| - t.string :username, null: false - t.datetime :last_sign_in - end - end - end - ) - end - - let(:create_table_migration_with_timestamp) do - %q( - class Users < ActiveRecord::Migration[6.0] - DOWNTIME = false - - def change - create_table :users do |t| - t.string :username, null: false - t.timestamp :last_sign_in - end - end - end - ) - end - let(:create_table_migration_without_datetime) do %q( class Users < ActiveRecord::Migration[6.0] @@ -120,92 +88,94 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do ) end - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when the ":datetime" data type is used on create_table' do - inspect_source(create_table_migration_with_datetime) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([8]) - expect(cop.offenses.first.message).to include('`datetime`') - end + expect_offense(<<~RUBY) + class Users < ActiveRecord::Migration[6.0] + DOWNTIME = false + + def change + create_table :users do |t| + t.string :username, null: false + t.datetime :last_sign_in + ^^^^^^^^ Do not use the `datetime` data type[...] 
+ end + end + end + RUBY end it 'registers an offense when the ":timestamp" data type is used on create_table' do - inspect_source(create_table_migration_with_timestamp) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([8]) - expect(cop.offenses.first.message).to include('timestamp') - end + expect_offense(<<~RUBY) + class Users < ActiveRecord::Migration[6.0] + DOWNTIME = false + + def change + create_table :users do |t| + t.string :username, null: false + t.timestamp :last_sign_in + ^^^^^^^^^ Do not use the `timestamp` data type[...] + end + end + end + RUBY end it 'does not register an offense when the ":datetime" data type is not used on create_table' do - inspect_source(create_table_migration_without_datetime) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(create_table_migration_without_datetime) end it 'does not register an offense when the ":datetime_with_timezone" data type is used on create_table' do - inspect_source(create_table_migration_with_datetime_with_timezone) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(create_table_migration_with_datetime_with_timezone) end it 'registers an offense when the ":datetime" data type is used on add_column' do - inspect_source(add_column_migration_with_datetime) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([7]) - expect(cop.offenses.first.message).to include('`datetime`') - end + expect_offense(<<~RUBY) + class Users < ActiveRecord::Migration[6.0] + DOWNTIME = false + + def change + add_column(:users, :username, :text) + add_column(:users, :last_sign_in, :datetime) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use the `datetime` data type[...] 
+ end + end + RUBY end it 'registers an offense when the ":timestamp" data type is used on add_column' do - inspect_source(add_column_migration_with_timestamp) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([7]) - expect(cop.offenses.first.message).to include('timestamp') - end + expect_offense(<<~RUBY) + class Users < ActiveRecord::Migration[6.0] + DOWNTIME = false + + def change + add_column(:users, :username, :text) + add_column(:users, :last_sign_in, :timestamp) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not use the `timestamp` data type[...] + end + end + RUBY end it 'does not register an offense when the ":datetime" data type is not used on add_column' do - inspect_source(add_column_migration_without_datetime) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(add_column_migration_without_datetime) end it 'does not register an offense when the ":datetime_with_timezone" data type is used on add_column' do - inspect_source(add_column_migration_with_datetime_with_timezone) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(add_column_migration_with_datetime_with_timezone) end end - context 'outside of migration' do - it 'registers no offense' do - inspect_source(add_column_migration_with_datetime) - inspect_source(add_column_migration_with_timestamp) - inspect_source(add_column_migration_without_datetime) - inspect_source(add_column_migration_with_datetime_with_timezone) - - expect(cop.offenses.size).to eq(0) + context 'when outside of migration' do + it 'registers no offense', :aggregate_failures do + expect_no_offenses(add_column_migration_with_datetime) + expect_no_offenses(add_column_migration_with_timestamp) + expect_no_offenses(add_column_migration_without_datetime) + expect_no_offenses(add_column_migration_with_datetime_with_timezone) end end end diff --git a/spec/rubocop/cop/migration/drop_table_spec.rb 
b/spec/rubocop/cop/migration/drop_table_spec.rb index d783cb56203..7a2e7929f11 100644 --- a/spec/rubocop/cop/migration/drop_table_spec.rb +++ b/spec/rubocop/cop/migration/drop_table_spec.rb @@ -5,11 +5,13 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/drop_table' RSpec.describe RuboCop::Cop::Migration::DropTable do - include CopHelper - subject(:cop) { described_class.new } context 'when in deployment migration' do + let(:msg) do + '`drop_table` in deployment migrations requires downtime. Drop tables in post-deployment migrations instead.' + end + before do allow(cop).to receive(:in_deployment_migration?).and_return(true) end @@ -30,7 +32,7 @@ RSpec.describe RuboCop::Cop::Migration::DropTable do expect_offense(<<~PATTERN) def up drop_table :table - ^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^ #{msg} end PATTERN end @@ -41,7 +43,7 @@ RSpec.describe RuboCop::Cop::Migration::DropTable do expect_offense(<<~PATTERN) def change drop_table :table - ^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^ #{msg} end PATTERN end @@ -63,7 +65,7 @@ RSpec.describe RuboCop::Cop::Migration::DropTable do expect_offense(<<~PATTERN) def up execute "DROP TABLE table" - ^^^^^^^ #{described_class::MSG} + ^^^^^^^ #{msg} end PATTERN end @@ -74,7 +76,7 @@ RSpec.describe RuboCop::Cop::Migration::DropTable do expect_offense(<<~PATTERN) def change execute "DROP TABLE table" - ^^^^^^^ #{described_class::MSG} + ^^^^^^^ #{msg} end PATTERN end diff --git a/spec/rubocop/cop/migration/hash_index_spec.rb b/spec/rubocop/cop/migration/hash_index_spec.rb index 15f68eb990f..08c803ee79e 100644 --- a/spec/rubocop/cop/migration/hash_index_spec.rb +++ b/spec/rubocop/cop/migration/hash_index_spec.rb @@ -5,48 +5,44 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/hash_index' RSpec.describe RuboCop::Cop::Migration::HashIndex do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do 
allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when creating a hash index' do - inspect_source('def change; add_index :table, :column, using: :hash; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + add_index :table, :column, using: :hash + ^^^^^^^^^^^^ hash indexes should be avoided at all costs[...] + end + RUBY end it 'registers an offense when creating a concurrent hash index' do - inspect_source('def change; add_concurrent_index :table, :column, using: :hash; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + add_concurrent_index :table, :column, using: :hash + ^^^^^^^^^^^^ hash indexes should be avoided at all costs[...] + end + RUBY end it 'registers an offense when creating a hash index using t.index' do - inspect_source('def change; t.index :table, :column, using: :hash; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + t.index :table, :column, using: :hash + ^^^^^^^^^^^^ hash indexes should be avoided at all costs[...] 
+ end + RUBY end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do - inspect_source('def change; index :table, :column, using: :hash; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses('def change; index :table, :column, using: :hash; end') end end end diff --git a/spec/rubocop/cop/migration/prevent_strings_spec.rb b/spec/rubocop/cop/migration/prevent_strings_spec.rb index 560a485017a..d8d4058363e 100644 --- a/spec/rubocop/cop/migration/prevent_strings_spec.rb +++ b/spec/rubocop/cop/migration/prevent_strings_spec.rb @@ -5,45 +5,41 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/prevent_strings' RSpec.describe RuboCop::Cop::Migration::PreventStrings do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end context 'when the string data type is used' do it 'registers an offense' do - expect_offense(<<~RUBY) + expect_offense(<<~RUBY, msg: "Do not use the `string` data type, use `text` instead.[...]") class Users < ActiveRecord::Migration[6.0] DOWNTIME = false def up create_table :users do |t| t.string :username, null: false - ^^^^^^ #{described_class::MSG} + ^^^^^^ %{msg} t.timestamps_with_timezone null: true t.string :password - ^^^^^^ #{described_class::MSG} + ^^^^^^ %{msg} end add_column(:users, :bio, :string) - ^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^ %{msg} add_column_with_default(:users, :url, :string, default: '/-/user', allow_null: false, limit: 255) - ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^ %{msg} change_column_type_concurrently :users, :commit_id, :string - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ %{msg} end end RUBY - - expect(cop.offenses.map(&:cop_name)).to all(eq('Migration/PreventStrings')) end end @@ -109,7 +105,7 @@ RSpec.describe 
RuboCop::Cop::Migration::PreventStrings do end end - context 'on down' do + context 'when using down method' do it 'registers no offense' do expect_no_offenses(<<~RUBY) class Users < ActiveRecord::Migration[6.0] @@ -138,7 +134,7 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do expect_no_offenses(<<~RUBY) class Users < ActiveRecord::Migration[6.0] diff --git a/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb b/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb index a25328a56a8..864d2325d53 100644 --- a/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb +++ b/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb @@ -5,18 +5,16 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/refer_to_index_by_name' RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end context 'when existing indexes are referred to without an explicit name' do it 'registers an offense' do - expect_offense(<<~RUBY) + expect_offense(<<~RUBY, msg: 'migration methods that refer to existing indexes must do so by name') class TestReferToIndexByName < ActiveRecord::Migration[6.0] DOWNTIME = false @@ -30,22 +28,22 @@ RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName do end if index_exists? :test_indexes, :column2 - ^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^ %{msg} remove_index :test_indexes, :column2 - ^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^ %{msg} end remove_index :test_indexes, column: column3 - ^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^ %{msg} remove_index :test_indexes, name: 'index_name_4' end def down if index_exists? 
:test_indexes, :column4, using: :gin, opclass: :gin_trgm_ops - ^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^ %{msg} remove_concurrent_index :test_indexes, :column4, using: :gin, opclass: :gin_trgm_ops - ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG} + ^^^^^^^^^^^^^^^^^^^^^^^ %{msg} end if index_exists? :test_indexes, :column3, unique: true, name: 'index_name_3', where: 'column3 = 10' @@ -54,13 +52,11 @@ RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName do end end RUBY - - expect(cop.offenses.map(&:cop_name)).to all(eq("Migration/#{described_class.name.demodulize}")) end end end - context 'outside migration' do + context 'when outside migration' do before do allow(cop).to receive(:in_migration?).and_return(false) end diff --git a/spec/rubocop/cop/migration/remove_column_spec.rb b/spec/rubocop/cop/migration/remove_column_spec.rb index 4768093b10d..a8a828bc62a 100644 --- a/spec/rubocop/cop/migration/remove_column_spec.rb +++ b/spec/rubocop/cop/migration/remove_column_spec.rb @@ -5,63 +5,55 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/remove_column' RSpec.describe RuboCop::Cop::Migration::RemoveColumn do - include CopHelper - subject(:cop) { described_class.new } def source(meth = 'change') "def #{meth}; remove_column :table, :column; end" end - context 'in a regular migration' do + context 'when in a regular migration' do before do allow(cop).to receive(:in_migration?).and_return(true) allow(cop).to receive(:in_post_deployment_migration?).and_return(false) end it 'registers an offense when remove_column is used in the change method' do - inspect_source(source('change')) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + remove_column :table, :column + ^^^^^^^^^^^^^ `remove_column` must only be used in post-deployment migrations + end + RUBY end it 'registers an offense when remove_column is used in the up method' do - 
inspect_source(source('up')) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def up + remove_column :table, :column + ^^^^^^^^^^^^^ `remove_column` must only be used in post-deployment migrations + end + RUBY end it 'registers no offense when remove_column is used in the down method' do - inspect_source(source('down')) - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(source('down')) end end - context 'in a post-deployment migration' do + context 'when in a post-deployment migration' do before do allow(cop).to receive(:in_migration?).and_return(true) allow(cop).to receive(:in_post_deployment_migration?).and_return(true) end it 'registers no offense' do - inspect_source(source) - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(source) end end - context 'outside of a migration' do + context 'when outside of a migration' do it 'registers no offense' do - inspect_source(source) - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(source) end end end diff --git a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb index 8da368d588c..8e29e51992c 100644 --- a/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb +++ b/spec/rubocop/cop/migration/remove_concurrent_index_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/remove_concurrent_index' RSpec.describe RuboCop::Cop::Migration::RemoveConcurrentIndex do - include CopHelper - subject(:cop) { described_class.new } context 'in migration' do @@ -15,26 +13,22 @@ RSpec.describe RuboCop::Cop::Migration::RemoveConcurrentIndex do end it 'registers an offense when remove_concurrent_index is used inside a change method' do - inspect_source('def change; remove_concurrent_index :table, :column; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - 
expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + ^^^^^^ `remove_concurrent_index` is not reversible [...] + remove_concurrent_index :table, :column + end + RUBY end it 'registers no offense when remove_concurrent_index is used inside an up method' do - inspect_source('def up; remove_concurrent_index :table, :column; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses('def up; remove_concurrent_index :table, :column; end') end end context 'outside of migration' do it 'registers no offense' do - inspect_source('def change; remove_concurrent_index :table, :column; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses('def change; remove_concurrent_index :table, :column; end') end end end diff --git a/spec/rubocop/cop/migration/remove_index_spec.rb b/spec/rubocop/cop/migration/remove_index_spec.rb index 274c907ac41..c9797443276 100644 --- a/spec/rubocop/cop/migration/remove_index_spec.rb +++ b/spec/rubocop/cop/migration/remove_index_spec.rb @@ -5,30 +5,26 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/remove_index' RSpec.describe RuboCop::Cop::Migration::RemoveIndex do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when remove_index is used' do - inspect_source('def change; remove_index :table, :column; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + remove_index :table, :column + ^^^^^^^^^^^^ `remove_index` requires downtime, use `remove_concurrent_index` instead + end + RUBY end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do - inspect_source('def change; remove_index :table, :column; end') - - expect(cop.offenses.size).to eq(0) + 
expect_no_offenses('def change; remove_index :table, :column; end') end end end diff --git a/spec/rubocop/cop/migration/safer_boolean_column_spec.rb b/spec/rubocop/cop/migration/safer_boolean_column_spec.rb index aa7bb58ab45..11661be2bb6 100644 --- a/spec/rubocop/cop/migration/safer_boolean_column_spec.rb +++ b/spec/rubocop/cop/migration/safer_boolean_column_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/safer_boolean_column' RSpec.describe RuboCop::Cop::Migration::SaferBooleanColumn do - include CopHelper - subject(:cop) { described_class.new } context 'in migration' do @@ -31,11 +29,10 @@ RSpec.describe RuboCop::Cop::Migration::SaferBooleanColumn do sources_and_offense.each do |source, offense| context "given the source \"#{source}\"" do it "registers the offense matching \"#{offense}\"" do - inspect_source(source) - - aggregate_failures do - expect(cop.offenses.first.message).to match(offense) - end + expect_offense(<<~RUBY, node: source, msg: offense) + %{node} + ^{node} Boolean columns on the `#{table}` table %{msg}.[...] 
+ RUBY end end end @@ -48,11 +45,7 @@ RSpec.describe RuboCop::Cop::Migration::SaferBooleanColumn do inoffensive_sources.each do |source| context "given the source \"#{source}\"" do it "registers no offense" do - inspect_source(source) - - aggregate_failures do - expect(cop.offenses).to be_empty - end + expect_no_offenses(source) end end end @@ -60,25 +53,19 @@ RSpec.describe RuboCop::Cop::Migration::SaferBooleanColumn do end it 'registers no offense for tables not listed in SMALL_TABLES' do - inspect_source("add_column :large_table, :column, :boolean") - - expect(cop.offenses).to be_empty + expect_no_offenses("add_column :large_table, :column, :boolean") end it 'registers no offense for non-boolean columns' do table = described_class::SMALL_TABLES.sample - inspect_source("add_column :#{table}, :column, :string") - - expect(cop.offenses).to be_empty + expect_no_offenses("add_column :#{table}, :column, :string") end end context 'outside of migration' do it 'registers no offense' do table = described_class::SMALL_TABLES.sample - inspect_source("add_column :#{table}, :column, :boolean") - - expect(cop.offenses).to be_empty + expect_no_offenses("add_column :#{table}, :column, :boolean") end end end diff --git a/spec/rubocop/cop/migration/schedule_async_spec.rb b/spec/rubocop/cop/migration/schedule_async_spec.rb index a7246dfa73a..008a23c7b8f 100644 --- a/spec/rubocop/cop/migration/schedule_async_spec.rb +++ b/spec/rubocop/cop/migration/schedule_async_spec.rb @@ -3,13 +3,9 @@ require 'fast_spec_helper' require 'rubocop' -require 'rubocop/rspec/support' - require_relative '../../../../rubocop/cop/migration/schedule_async' RSpec.describe RuboCop::Cop::Migration::ScheduleAsync do - include CopHelper - let(:cop) { described_class.new } let(:source) do <<~SOURCE @@ -21,9 +17,7 @@ RSpec.describe RuboCop::Cop::Migration::ScheduleAsync do shared_examples 'a disabled cop' do it 'does not register any offenses' do - inspect_source(source) - - expect(cop.offenses).to be_empty + 
expect_no_offenses(source) end end @@ -50,101 +44,73 @@ RSpec.describe RuboCop::Cop::Migration::ScheduleAsync do end context 'BackgroundMigrationWorker.perform_async' do - it 'adds an offence when calling `BackgroundMigrationWorker.peform_async`' do - inspect_source(source) - - expect(cop.offenses.size).to eq(1) - end - - it 'autocorrects to the right version' do - correct_source = <<~CORRECT - def up - migrate_async(ClazzName, "Bar", "Baz") - end - CORRECT + it 'adds an offense when calling `BackgroundMigrationWorker.peform_async` and corrects', :aggregate_failures do + expect_offense(<<~RUBY) + def up + BackgroundMigrationWorker.perform_async(ClazzName, "Bar", "Baz") + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't call [...] + end + RUBY - expect(autocorrect_source(source)).to eq(correct_source) + expect_correction(<<~RUBY) + def up + migrate_async(ClazzName, "Bar", "Baz") + end + RUBY end end context 'BackgroundMigrationWorker.perform_in' do - let(:source) do - <<~SOURCE + it 'adds an offense and corrects', :aggregate_failures do + expect_offense(<<~RUBY) def up BackgroundMigrationWorker + ^^^^^^^^^^^^^^^^^^^^^^^^^ Don't call [...] .perform_in(delay, ClazzName, "Bar", "Baz") end - SOURCE - end - - it 'adds an offence' do - inspect_source(source) + RUBY - expect(cop.offenses.size).to eq(1) - end - - it 'autocorrects to the right version' do - correct_source = <<~CORRECT + expect_correction(<<~RUBY) def up migrate_in(delay, ClazzName, "Bar", "Baz") end - CORRECT - - expect(autocorrect_source(source)).to eq(correct_source) + RUBY end end context 'BackgroundMigrationWorker.bulk_perform_async' do - let(:source) do - <<~SOURCE + it 'adds an offense and corrects', :aggregate_failures do + expect_offense(<<~RUBY) def up BackgroundMigrationWorker + ^^^^^^^^^^^^^^^^^^^^^^^^^ Don't call [...] 
.bulk_perform_async(jobs) end - SOURCE - end - - it 'adds an offence' do - inspect_source(source) - - expect(cop.offenses.size).to eq(1) - end + RUBY - it 'autocorrects to the right version' do - correct_source = <<~CORRECT + expect_correction(<<~RUBY) def up bulk_migrate_async(jobs) end - CORRECT - - expect(autocorrect_source(source)).to eq(correct_source) + RUBY end end context 'BackgroundMigrationWorker.bulk_perform_in' do - let(:source) do - <<~SOURCE + it 'adds an offense and corrects', :aggregate_failures do + expect_offense(<<~RUBY) def up BackgroundMigrationWorker + ^^^^^^^^^^^^^^^^^^^^^^^^^ Don't call [...] .bulk_perform_in(5.minutes, jobs) end - SOURCE - end - - it 'adds an offence' do - inspect_source(source) + RUBY - expect(cop.offenses.size).to eq(1) - end - - it 'autocorrects to the right version' do - correct_source = <<~CORRECT + expect_correction(<<~RUBY) def up bulk_migrate_in(5.minutes, jobs) end - CORRECT - - expect(autocorrect_source(source)).to eq(correct_source) + RUBY end end end diff --git a/spec/rubocop/cop/migration/timestamps_spec.rb b/spec/rubocop/cop/migration/timestamps_spec.rb index 2f4154907d2..4da3dffbfc6 100644 --- a/spec/rubocop/cop/migration/timestamps_spec.rb +++ b/spec/rubocop/cop/migration/timestamps_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/timestamps' RSpec.describe RuboCop::Cop::Migration::Timestamps do - include CopHelper - subject(:cop) { described_class.new } let(:migration_with_timestamps) do @@ -62,38 +60,36 @@ RSpec.describe RuboCop::Cop::Migration::Timestamps do end it 'registers an offense when the "timestamps" method is used' do - inspect_source(migration_with_timestamps) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([8]) - end + expect_offense(<<~RUBY) + class Users < ActiveRecord::Migration[4.2] + DOWNTIME = false + + def change + create_table :users do |t| + t.string :username, null: false + 
t.timestamps null: true + ^^^^^^^^^^ Do not use `timestamps`, use `timestamps_with_timezone` instead + t.string :password + end + end + end + RUBY end it 'does not register an offense when the "timestamps" method is not used' do - inspect_source(migration_without_timestamps) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(migration_without_timestamps) end it 'does not register an offense when the "timestamps_with_timezone" method is used' do - inspect_source(migration_with_timestamps_with_timezone) - - aggregate_failures do - expect(cop.offenses.size).to eq(0) - end + expect_no_offenses(migration_with_timestamps_with_timezone) end end context 'outside of migration' do - it 'registers no offense' do - inspect_source(migration_with_timestamps) - inspect_source(migration_without_timestamps) - inspect_source(migration_with_timestamps_with_timezone) - - expect(cop.offenses.size).to eq(0) + it 'registers no offense', :aggregate_failures do + expect_no_offenses(migration_with_timestamps) + expect_no_offenses(migration_without_timestamps) + expect_no_offenses(migration_with_timestamps_with_timezone) end end end diff --git a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb index 8049cba12d0..5e33e11b13f 100644 --- a/spec/rubocop/cop/migration/update_column_in_batches_spec.rb +++ b/spec/rubocop/cop/migration/update_column_in_batches_spec.rb @@ -3,8 +3,6 @@ require 'fast_spec_helper' require 'rubocop' -require 'rubocop/rspec/support' - require_relative '../../../../rubocop/cop/migration/update_column_in_batches' RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do @@ -31,9 +29,7 @@ RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do context 'outside of a migration' do it 'does not register any offenses' do - inspect_source(migration_code) - - expect(cop.offenses).to be_empty + expect_no_offenses(migration_code) end end @@ -53,14 +49,14 @@ 
RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do let(:relative_spec_filepath) { Pathname.new(spec_filepath).relative_path_from(tmp_rails_root) } it 'registers an offense when using update_column_in_batches' do - inspect_source(migration_code, @migration_file) - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([2]) - expect(cop.offenses.first.message) - .to include("`#{relative_spec_filepath}`") - end + expect_offense(<<~RUBY, @migration_file) + def up + update_column_in_batches(:projects, :name, "foo") do |table, query| + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Migration running `update_column_in_batches` [...] + query.where(table[:name].eq(nil)) + end + end + RUBY end end @@ -76,20 +72,18 @@ RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do end it 'does not register any offenses' do - inspect_source(migration_code, @migration_file) - - expect(cop.offenses).to be_empty + expect_no_offenses(migration_code) end end - context 'in a migration' do + context 'when in migration' do let(:migration_filepath) { File.join(tmp_rails_root, 'db', 'migrate', '20121220064453_my_super_migration.rb') } it_behaves_like 'a migration file with no spec file' it_behaves_like 'a migration file with a spec file' end - context 'in a post migration' do + context 'when in a post migration' do let(:migration_filepath) { File.join(tmp_rails_root, 'db', 'post_migrate', '20121220064453_my_super_migration.rb') } it_behaves_like 'a migration file with no spec file' @@ -99,14 +93,14 @@ RSpec.describe RuboCop::Cop::Migration::UpdateColumnInBatches do context 'EE migrations' do let(:spec_filepath) { File.join(tmp_rails_root, 'ee', 'spec', 'migrations', 'my_super_migration_spec.rb') } - context 'in a migration' do + context 'when in a migration' do let(:migration_filepath) { File.join(tmp_rails_root, 'ee', 'db', 'migrate', '20121220064453_my_super_migration.rb') } it_behaves_like 'a migration file with no spec 
file' it_behaves_like 'a migration file with a spec file' end - context 'in a post migration' do + context 'when in a post migration' do let(:migration_filepath) { File.join(tmp_rails_root, 'ee', 'db', 'post_migrate', '20121220064453_my_super_migration.rb') } it_behaves_like 'a migration file with no spec file' diff --git a/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb b/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb index 814d87ea24b..429490294be 100644 --- a/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb +++ b/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb @@ -5,60 +5,55 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/with_lock_retries_disallowed_method' RSpec.describe RuboCop::Cop::Migration::WithLockRetriesDisallowedMethod do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do allow(cop).to receive(:in_migration?).and_return(true) end it 'registers an offense when `with_lock_retries` block has disallowed method' do - inspect_source('def change; with_lock_retries { disallowed_method }; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + with_lock_retries { disallowed_method } + ^^^^^^^^^^^^^^^^^ The method is not allowed [...] + end + RUBY end it 'registers an offense when `with_lock_retries` block has disallowed methods' do - source = <<~HEREDOC - def change - with_lock_retries do - disallowed_method + expect_offense(<<~RUBY) + def change + with_lock_retries do + disallowed_method + ^^^^^^^^^^^^^^^^^ The method is not allowed [...] - create_table do |t| - t.text :text - end + create_table do |t| + t.text :text + end - other_disallowed_method + other_disallowed_method + ^^^^^^^^^^^^^^^^^^^^^^^ The method is not allowed [...] 
- add_column :users, :name + add_column :users, :name + end end - end - HEREDOC - - inspect_source(source) - - aggregate_failures do - expect(cop.offenses.size).to eq(2) - expect(cop.offenses.map(&:line)).to eq([3, 9]) - end + RUBY end it 'registers no offense when `with_lock_retries` has only allowed method' do - inspect_source('def up; with_lock_retries { add_foreign_key :foo, :bar }; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(<<~RUBY) + def up + with_lock_retries { add_foreign_key :foo, :bar } + end + RUBY end describe 'for `add_foreign_key`' do it 'registers an offense when more than two FKs are added' do message = described_class::MSG_ONLY_ONE_FK_ALLOWED - expect_offense <<~RUBY + expect_offense(<<~RUBY) with_lock_retries do add_foreign_key :imports, :projects, column: :project_id, on_delete: :cascade ^^^^^^^^^^^^^^^ #{message} @@ -71,11 +66,13 @@ RSpec.describe RuboCop::Cop::Migration::WithLockRetriesDisallowedMethod do end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do - inspect_source('def change; with_lock_retries { disallowed_method }; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(<<~RUBY) + def change + with_lock_retries { disallowed_method } + end + RUBY end end end diff --git a/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb b/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb index f0be14c8ee9..cf2d46d52c9 100644 --- a/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb +++ b/spec/rubocop/cop/migration/with_lock_retries_with_change_spec.rb @@ -5,36 +5,38 @@ require 'rubocop' require_relative '../../../../rubocop/cop/migration/with_lock_retries_with_change' RSpec.describe RuboCop::Cop::Migration::WithLockRetriesWithChange do - include CopHelper - subject(:cop) { described_class.new } - context 'in migration' do + context 'when in migration' do before do allow(cop).to 
receive(:in_migration?).and_return(true) end it 'registers an offense when `with_lock_retries` is used inside a `change` method' do - inspect_source('def change; with_lock_retries {}; end') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - end + expect_offense(<<~RUBY) + def change + ^^^^^^ `with_lock_retries` cannot be used within `change` [...] + with_lock_retries {} + end + RUBY end it 'registers no offense when `with_lock_retries` is used inside an `up` method' do - inspect_source('def up; with_lock_retries {}; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(<<~RUBY) + def up + with_lock_retries {} + end + RUBY end end - context 'outside of migration' do + context 'when outside of migration' do it 'registers no offense' do - inspect_source('def change; with_lock_retries {}; end') - - expect(cop.offenses.size).to eq(0) + expect_no_offenses(<<~RUBY) + def change + with_lock_retries {} + end + RUBY end end end diff --git a/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb b/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb index dc665f9dd25..5a557ec26d6 100644 --- a/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb +++ b/spec/rubocop/cop/prefer_class_methods_over_module_spec.rb @@ -2,15 +2,12 @@ require 'fast_spec_helper' require 'rubocop' -require 'rubocop/rspec/support' require_relative '../../../rubocop/cop/prefer_class_methods_over_module' RSpec.describe RuboCop::Cop::PreferClassMethodsOverModule do - include CopHelper - subject(:cop) { described_class.new } - it 'flags violation when using module ClassMethods' do + it 'flags violation when using module ClassMethods and corrects', :aggregate_failures do expect_offense(<<~RUBY) module Foo extend ActiveSupport::Concern @@ -22,6 +19,17 @@ RSpec.describe RuboCop::Cop::PreferClassMethodsOverModule do end end RUBY + + expect_correction(<<~RUBY) + module Foo + extend ActiveSupport::Concern + + class_methods do + def 
a_class_method + end + end + end + RUBY end it "doesn't flag violation when using class_methods" do @@ -69,30 +77,4 @@ RSpec.describe RuboCop::Cop::PreferClassMethodsOverModule do end RUBY end - - it 'autocorrects ClassMethods into class_methods' do - source = <<~RUBY - module Foo - extend ActiveSupport::Concern - - module ClassMethods - def a_class_method - end - end - end - RUBY - autocorrected = autocorrect_source(source) - - expected_source = <<~RUBY - module Foo - extend ActiveSupport::Concern - - class_methods do - def a_class_method - end - end - end - RUBY - expect(autocorrected).to eq(expected_source) - end end diff --git a/spec/rubocop/cop/put_group_routes_under_scope_spec.rb b/spec/rubocop/cop/put_group_routes_under_scope_spec.rb index 46b50d7690b..f9e64d4d68f 100644 --- a/spec/rubocop/cop/put_group_routes_under_scope_spec.rb +++ b/spec/rubocop/cop/put_group_routes_under_scope_spec.rb @@ -5,8 +5,6 @@ require 'rubocop' require_relative '../../../rubocop/cop/put_group_routes_under_scope' RSpec.describe RuboCop::Cop::PutGroupRoutesUnderScope do - include CopHelper - subject(:cop) { described_class.new } %w[resource resources get post put patch delete].each do |route_method| @@ -15,12 +13,12 @@ RSpec.describe RuboCop::Cop::PutGroupRoutesUnderScope do marker = '^' * offense.size expect_offense(<<~PATTERN) - scope(path: 'groups/*group_id/-', module: :groups) do - resource :issues - end + scope(path: 'groups/*group_id/-', module: :groups) do + resource :issues + end - #{offense} - #{marker} Put new group routes under /-/ scope + #{offense} + #{marker} Put new group routes under /-/ scope PATTERN end end diff --git a/spec/rubocop/cop/sidekiq_options_queue_spec.rb b/spec/rubocop/cop/sidekiq_options_queue_spec.rb index 306cbcf62b5..e65118ca192 100644 --- a/spec/rubocop/cop/sidekiq_options_queue_spec.rb +++ b/spec/rubocop/cop/sidekiq_options_queue_spec.rb @@ -3,28 +3,19 @@ require 'fast_spec_helper' require 'rubocop' -require 'rubocop/rspec/support' - 
require_relative '../../../rubocop/cop/sidekiq_options_queue' RSpec.describe RuboCop::Cop::SidekiqOptionsQueue do - include CopHelper - subject(:cop) { described_class.new } it 'registers an offense when `sidekiq_options` is used with the `queue` option' do - inspect_source('sidekiq_options queue: "some_queue"') - - aggregate_failures do - expect(cop.offenses.size).to eq(1) - expect(cop.offenses.map(&:line)).to eq([1]) - expect(cop.highlights).to eq(['queue: "some_queue"']) - end + expect_offense(<<~CODE) + sidekiq_options queue: "some_queue" + ^^^^^^^^^^^^^^^^^^^ Do not manually set a queue; `ApplicationWorker` sets one automatically. + CODE end it 'does not register an offense when `sidekiq_options` is used with another option' do - inspect_source('sidekiq_options retry: false') - - expect(cop.offenses).to be_empty + expect_no_offenses('sidekiq_options retry: false') end end diff --git a/spec/serializers/base_discussion_entity_spec.rb b/spec/serializers/base_discussion_entity_spec.rb index 5f483da4113..334e71d23f4 100644 --- a/spec/serializers/base_discussion_entity_spec.rb +++ b/spec/serializers/base_discussion_entity_spec.rb @@ -66,4 +66,13 @@ RSpec.describe BaseDiscussionEntity do ) end end + + context 'when issues are disabled in a project' do + let(:project) { create(:project, :issues_disabled) } + let(:note) { create(:discussion_note_on_merge_request, project: project) } + + it 'does not show a new issues path' do + expect(entity.as_json[:resolve_with_issue_path]).to be_nil + end + end end diff --git a/spec/services/boards/lists/list_service_spec.rb b/spec/services/boards/lists/list_service_spec.rb index dfe65f3d241..21619abf6aa 100644 --- a/spec/services/boards/lists/list_service_spec.rb +++ b/spec/services/boards/lists/list_service_spec.rb @@ -8,6 +8,26 @@ RSpec.describe Boards::Lists::ListService do describe '#execute' do let(:service) { described_class.new(parent, user) } + shared_examples 'hidden lists' do + let!(:list) { create(:list, board: board, 
label: label) } + + context 'when hide_backlog_list is true' do + it 'hides backlog list' do + board.update!(hide_backlog_list: true) + + expect(service.execute(board)).to match_array([board.closed_list, list]) + end + end + + context 'when hide_closed_list is true' do + it 'hides closed list' do + board.update!(hide_closed_list: true) + + expect(service.execute(board)).to match_array([board.backlog_list, list]) + end + end + end + context 'when board parent is a project' do let(:project) { create(:project) } let(:board) { create(:board, project: project) } @@ -16,6 +36,7 @@ RSpec.describe Boards::Lists::ListService do let(:parent) { project } it_behaves_like 'lists list service' + it_behaves_like 'hidden lists' end context 'when board parent is a group' do @@ -26,6 +47,7 @@ RSpec.describe Boards::Lists::ListService do let(:parent) { group } it_behaves_like 'lists list service' + it_behaves_like 'hidden lists' end end end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index 1005985b3e4..9fafc57a770 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -101,7 +101,7 @@ RSpec.describe Ci::CreatePipelineService do describe 'recording a conversion event' do it 'schedules a record conversion event worker' do - expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates, user.id) + expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates_b, user.id) pipeline end diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb index 90956e7b4ea..98963f57341 100644 --- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb +++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb @@ -39,6 +39,8 @@ 
RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace) stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace) stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace) + stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME, namespace: namespace) + stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME, namespace: namespace) stub_kubeclient_get_secret( api_url, diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb index a4f018aec0c..11045dfe950 100644 --- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb +++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb @@ -147,6 +147,8 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace) stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace) stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace) + stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME, namespace: namespace) + stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME, namespace: namespace) end it 'creates a namespace object' do @@ -243,6 +245,47 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do ) ) end + + it 'creates a role granting cilium 
permissions to the service account' do + subject + + expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME}").with( + body: hash_including( + metadata: { + name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME, + namespace: namespace + }, + rules: [{ + apiGroups: %w(cilium.io), + resources: %w(ciliumnetworkpolicies), + verbs: %w(get list create update patch) + }] + ) + ) + end + + it 'creates a role binding granting cilium permissions to the service account' do + subject + + expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME}").with( + body: hash_including( + metadata: { + name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME, + namespace: namespace + }, + roleRef: { + apiGroup: 'rbac.authorization.k8s.io', + kind: 'Role', + name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME + }, + subjects: [{ + kind: 'ServiceAccount', + name: service_account_name, + namespace: namespace + }] + ) + ) + end end end end diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb index 2f9bb72939a..a5fce315d91 100644 --- a/spec/services/groups/destroy_service_spec.rb +++ b/spec/services/groups/destroy_service_spec.rb @@ -229,10 +229,10 @@ RSpec.describe Groups::DestroyService do # will still be executed for the nested group as they fall under the same hierarchy # and hence we need to account for this scenario. 
expect(UserProjectAccessChangedService) - .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original + .to receive(:new).with(shared_with_group.users_ids_of_direct_members).and_call_original expect(UserProjectAccessChangedService) - .not_to receive(:new).with(shared_group.user_ids_for_project_authorizations) + .not_to receive(:new).with(shared_group.users_ids_of_direct_members) destroy_group(shared_group, user, false) end @@ -246,7 +246,7 @@ RSpec.describe Groups::DestroyService do it 'makes use of a specific service to update project authorizations' do expect(UserProjectAccessChangedService) - .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original + .to receive(:new).with(shared_with_group.users_ids_of_direct_members).and_call_original destroy_group(shared_with_group, user, false) end diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb index fb88433d8f6..df994b9f2a3 100644 --- a/spec/services/groups/group_links/create_service_spec.rb +++ b/spec/services/groups/group_links/create_service_spec.rb @@ -74,46 +74,56 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do end end - context 'group hierarchies' do + context 'project authorizations based on group hierarchies' do before do group_parent.add_owner(parent_group_user) group.add_owner(group_user) group_child.add_owner(child_group_user) end - context 'group user' do - let(:user) { group_user } + context 'project authorizations refresh' do + it 'is executed only for the direct members of the group' do + expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id)).and_call_original - it 'create proper authorizations' do subject.execute(shared_group) - - expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey - expect(Ability.allowed?(user, :read_project, project)).to be_truthy - 
expect(Ability.allowed?(user, :read_project, project_child)).to be_truthy end end - context 'parent group user' do - let(:user) { parent_group_user } + context 'project authorizations' do + context 'group user' do + let(:user) { group_user } - it 'create proper authorizations' do - subject.execute(shared_group) + it 'create proper authorizations' do + subject.execute(shared_group) - expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey - expect(Ability.allowed?(user, :read_project, project)).to be_falsey - expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey + expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey + expect(Ability.allowed?(user, :read_project, project)).to be_truthy + expect(Ability.allowed?(user, :read_project, project_child)).to be_truthy + end end - end - context 'child group user' do - let(:user) { child_group_user } + context 'parent group user' do + let(:user) { parent_group_user } - it 'create proper authorizations' do - subject.execute(shared_group) + it 'create proper authorizations' do + subject.execute(shared_group) + + expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey + expect(Ability.allowed?(user, :read_project, project)).to be_falsey + expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey + end + end + + context 'child group user' do + let(:user) { child_group_user } + + it 'create proper authorizations' do + subject.execute(shared_group) - expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey - expect(Ability.allowed?(user, :read_project, project)).to be_falsey - expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey + expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey + expect(Ability.allowed?(user, :read_project, project)).to be_falsey + expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey + end end end end diff --git 
a/spec/services/groups/group_links/destroy_service_spec.rb b/spec/services/groups/group_links/destroy_service_spec.rb index 22fe8a1d58b..97fe23e9147 100644 --- a/spec/services/groups/group_links/destroy_service_spec.rb +++ b/spec/services/groups/group_links/destroy_service_spec.rb @@ -47,8 +47,8 @@ RSpec.describe Groups::GroupLinks::DestroyService, '#execute' do it 'updates project authorization once per group' do expect(GroupGroupLink).to receive(:delete).and_call_original - expect(group).to receive(:refresh_members_authorized_projects).once - expect(another_group).to receive(:refresh_members_authorized_projects).once + expect(group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true).once + expect(another_group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true).once subject.execute(links) end diff --git a/spec/services/groups/group_links/update_service_spec.rb b/spec/services/groups/group_links/update_service_spec.rb index e4ff83d7926..436cdf89a0f 100644 --- a/spec/services/groups/group_links/update_service_spec.rb +++ b/spec/services/groups/group_links/update_service_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do let_it_be(:group) { create(:group, :private) } let_it_be(:shared_group) { create(:group, :private) } let_it_be(:project) { create(:project, group: shared_group) } - let(:group_member) { create(:user) } + let(:group_member_user) { create(:user) } let!(:link) { create(:group_group_link, shared_group: shared_group, shared_with_group: group) } let(:expiry_date) { 1.month.from_now.to_date } @@ -20,7 +20,7 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do subject { described_class.new(link).execute(group_link_params) } before do - group.add_developer(group_member) + group.add_developer(group_member_user) end it 'updates existing link' do @@ -36,11 +36,11 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do end it 'updates project 
permissions' do - expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false) + expect { subject }.to change { group_member_user.can?(:create_release, project) }.from(true).to(false) end it 'executes UserProjectAccessChangedService' do - expect_next_instance_of(UserProjectAccessChangedService) do |service| + expect_next_instance_of(UserProjectAccessChangedService, [group_member_user.id]) do |service| expect(service).to receive(:execute) end diff --git a/spec/services/issuable/process_assignees_spec.rb b/spec/services/issuable/process_assignees_spec.rb new file mode 100644 index 00000000000..876c84957cc --- /dev/null +++ b/spec/services/issuable/process_assignees_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Issuable::ProcessAssignees do + describe '#execute' do + it 'returns assignee_ids when assignee_ids are specified' do + process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9), + add_assignee_ids: %w(2 4 6), + remove_assignee_ids: %w(4 7 11), + existing_assignee_ids: %w(1 3 9), + extra_assignee_ids: %w(2 5 12)) + result = process.execute + + expect(result.sort).to eq(%w(5 7 9).sort) + end + + it 'combines other ids when assignee_ids is empty' do + process = Issuable::ProcessAssignees.new(assignee_ids: [], + add_assignee_ids: %w(2 4 6), + remove_assignee_ids: %w(4 7 11), + existing_assignee_ids: %w(1 3 11), + extra_assignee_ids: %w(2 5 12)) + result = process.execute + + expect(result.sort).to eq(%w(1 2 3 5 6 12).sort) + end + + it 'combines other ids when assignee_ids is nil' do + process = Issuable::ProcessAssignees.new(assignee_ids: nil, + add_assignee_ids: %w(2 4 6), + remove_assignee_ids: %w(4 7 11), + existing_assignee_ids: %w(1 3 11), + extra_assignee_ids: %w(2 5 12)) + result = process.execute + + expect(result.sort).to eq(%w(1 2 3 5 6 12).sort) + end + + it 'combines other ids when assignee_ids and add_assignee_ids are nil' do + process = 
Issuable::ProcessAssignees.new(assignee_ids: nil, + add_assignee_ids: nil, + remove_assignee_ids: %w(4 7 11), + existing_assignee_ids: %w(1 3 11), + extra_assignee_ids: %w(2 5 12)) + result = process.execute + + expect(result.sort).to eq(%w(1 2 3 5 12).sort) + end + + it 'combines other ids when assignee_ids and remove_assignee_ids are nil' do + process = Issuable::ProcessAssignees.new(assignee_ids: nil, + add_assignee_ids: %w(2 4 6), + remove_assignee_ids: nil, + existing_assignee_ids: %w(1 3 11), + extra_assignee_ids: %w(2 5 12)) + result = process.execute + + expect(result.sort).to eq(%w(1 2 4 3 5 6 11 12).sort) + end + + it 'combines ids when only add_assignee_ids and remove_assignee_ids are passed' do + process = Issuable::ProcessAssignees.new(assignee_ids: nil, + add_assignee_ids: %w(2 4 6), + remove_assignee_ids: %w(4 7 11)) + result = process.execute + + expect(result.sort).to eq(%w(2 6).sort) + end + end +end diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb index fd1bcf82ccd..d199f825276 100644 --- a/spec/services/issues/export_csv_service_spec.rb +++ b/spec/services/issues/export_csv_service_spec.rb @@ -4,11 +4,11 @@ require 'spec_helper' RSpec.describe Issues::ExportCsvService do let_it_be(:user) { create(:user) } - let(:group) { create(:group) } - let(:project) { create(:project, :public, group: group) } - let!(:issue) { create(:issue, project: project, author: user) } - let!(:bad_issue) { create(:issue, project: project, author: user) } - let(:subject) { described_class.new(Issue.all, project) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, :public, group: group) } + let_it_be(:issue) { create(:issue, project: project, author: user) } + let_it_be(:bad_issue) { create(:issue, project: project, author: user) } + subject { described_class.new(Issue.all, project) } it 'renders csv to string' do expect(subject.csv_data).to be_a String @@ -33,11 +33,11 @@ 
RSpec.describe Issues::ExportCsvService do end context 'includes' do - let(:milestone) { create(:milestone, title: 'v1.0', project: project) } - let(:idea_label) { create(:label, project: project, title: 'Idea') } - let(:feature_label) { create(:label, project: project, title: 'Feature') } + let_it_be(:milestone) { create(:milestone, title: 'v1.0', project: project) } + let_it_be(:idea_label) { create(:label, project: project, title: 'Idea') } + let_it_be(:feature_label) { create(:label, project: project, title: 'Feature') } - before do + before_all do # Creating a timelog touches the updated_at timestamp of issue, # so create these first. issue.timelogs.create!(time_spent: 360, user: user) @@ -60,6 +60,10 @@ RSpec.describe Issues::ExportCsvService do expect(csv.headers).to include('Title', 'Description') end + it 'returns two issues' do + expect(csv.count).to eq(2) + end + specify 'iid' do expect(csv[0]['Issue ID']).to eq issue.iid.to_s end @@ -150,7 +154,7 @@ RSpec.describe Issues::ExportCsvService do end context 'with issues filtered by labels and project' do - let(:subject) do + subject do described_class.new( IssuesFinder.new(user, project_id: project.id, @@ -162,6 +166,27 @@ RSpec.describe Issues::ExportCsvService do expect(csv[0]['Issue ID']).to eq issue.iid.to_s end end + + context 'with label links' do + let(:labeled_issues) { create_list(:labeled_issue, 2, project: project, author: user, labels: [feature_label, idea_label]) } + + it 'does not run a query for each label link' do + control_count = ActiveRecord::QueryRecorder.new { csv }.count + + labeled_issues + + expect { csv }.not_to exceed_query_limit(control_count) + expect(csv.count).to eq(4) + end + + it 'returns the labels in sorted order' do + labeled_issues + + labeled_rows = csv.select { |entry| labeled_issues.map(&:iid).include?(entry['Issue ID'].to_i) } + expect(labeled_rows.count).to eq(2) + expect(labeled_rows.map { |entry| entry['Labels'] }).to all( eq("Feature,Idea") ) + end + end end 
context 'with minimal details' do diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb index 611f12c8146..80de453cd8b 100644 --- a/spec/services/merge_requests/merge_service_spec.rb +++ b/spec/services/merge_requests/merge_service_spec.rb @@ -166,20 +166,6 @@ RSpec.describe MergeRequests::MergeService do service.execute(merge_request) end - context 'when jira_issue_transition_id is not present' do - before do - allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(nil) - end - - it 'does not close issue' do - jira_tracker.update!(jira_issue_transition_id: nil) - - expect_any_instance_of(JiraService).not_to receive(:transition_issue) - - service.execute(merge_request) - end - end - context 'wrong issue markdown' do it 'does not close issues on Jira issue tracker' do jira_issue = ExternalIssue.new('#JIRA-123', project) diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb index 747ecbf4fa4..eeb782b573e 100644 --- a/spec/services/merge_requests/refresh_service_spec.rb +++ b/spec/services/merge_requests/refresh_service_spec.rb @@ -72,6 +72,21 @@ RSpec.describe MergeRequests::RefreshService do allow(NotificationService).to receive(:new) { notification_service } end + context 'query count' do + it 'does not execute a lot of queries' do + # Hardcoded the query limit since the queries can also be reduced even + # if there are the same number of merge requests (e.g. by preloading + # associations). This should also fail in case additional queries are + # added elsewhere that affected this service. + # + # The limit is based on the number of queries executed at the current + # state of the service. As we reduce the number of queries executed in + # this service, the limit should be reduced as well. 
+ expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') } + .not_to exceed_query_limit(260) + end + end + it 'executes hooks with update action' do refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') reload_mrs diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb index 7346a5b95ae..cb2e8aff8a1 100644 --- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb +++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb @@ -35,7 +35,7 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do end end - context 'for each track and series with the right conditions' do + context 'for each track and series with the right conditions', :quarantine do # https://gitlab.com/gitlab-org/gitlab/-/issues/322092 using RSpec::Parameterized::TableSyntax where(:track, :interval, :actions_completed) do diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index b67c37ba02d..ed9ff45ef1a 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -2159,8 +2159,38 @@ RSpec.describe NotificationService, :mailer do end describe '#merge_when_pipeline_succeeds' do + before do + update_custom_notification(:merge_when_pipeline_succeeds, @u_guest_custom, resource: project) + update_custom_notification(:merge_when_pipeline_succeeds, @u_custom_global) + end + it 'send notification that merge will happen when pipeline succeeds' do notification.merge_when_pipeline_succeeds(merge_request, assignee) + + should_email(merge_request.author) + should_email(@u_watcher) + should_email(@subscriber) + should_email(@u_guest_custom) + should_email(@u_custom_global) + should_not_email(@unsubscriber) + should_not_email(@u_disabled) + end + + it 'does not send notification if the custom event is disabled' do + 
update_custom_notification(:merge_when_pipeline_succeeds, @u_guest_custom, resource: project, value: false) + update_custom_notification(:merge_when_pipeline_succeeds, @u_custom_global, resource: nil, value: false) + notification.merge_when_pipeline_succeeds(merge_request, assignee) + + should_not_email(@u_guest_custom) + should_not_email(@u_custom_global) + end + + it 'sends notification to participants even if the custom event is disabled' do + update_custom_notification(:merge_when_pipeline_succeeds, merge_request.author, resource: project, value: false) + update_custom_notification(:merge_when_pipeline_succeeds, @u_watcher, resource: project, value: false) + update_custom_notification(:merge_when_pipeline_succeeds, @subscriber, resource: project, value: false) + notification.merge_when_pipeline_succeeds(merge_request, assignee) + should_email(merge_request.author) should_email(@u_watcher) should_email(@subscriber) diff --git a/spec/services/packages/create_temporary_package_service_spec.rb b/spec/services/packages/create_temporary_package_service_spec.rb new file mode 100644 index 00000000000..4b8d37401d8 --- /dev/null +++ b/spec/services/packages/create_temporary_package_service_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Packages::CreateTemporaryPackageService do + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + let_it_be(:params) { {} } + let_it_be(:package_name) { 'my-package' } + let_it_be(:package_type) { 'rubygems' } + + describe '#execute' do + subject { described_class.new(project, user, params).execute(package_type, name: package_name) } + + let(:package) { Packages::Package.last } + + it 'creates the package', :aggregate_failures do + expect { subject }.to change { Packages::Package.count }.by(1) + + expect(package).to be_valid + expect(package).to be_processing + expect(package.name).to eq(package_name) + expect(package.version).to 
start_with(described_class::PACKAGE_VERSION) + expect(package.package_type).to eq(package_type) + end + + it 'can create two packages in a row', :aggregate_failures do + expect { subject }.to change { Packages::Package.count }.by(1) + + expect do + described_class.new(project, user, params).execute(package_type, name: package_name) + end.to change { Packages::Package.count }.by(1) + + expect(package).to be_valid + expect(package).to be_processing + expect(package.name).to eq(package_name) + expect(package.version).to start_with(described_class::PACKAGE_VERSION) + expect(package.package_type).to eq(package_type) + end + + it_behaves_like 'assigns the package creator' + it_behaves_like 'assigns build to package' + end +end diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb index 10fce6c1651..ba5729eaf59 100644 --- a/spec/services/packages/npm/create_package_service_spec.rb +++ b/spec/services/packages/npm/create_package_service_spec.rb @@ -15,7 +15,7 @@ RSpec.describe Packages::Npm::CreatePackageService do end let(:override) { {} } - let(:package_name) { "@#{namespace.path}/my-app".freeze } + let(:package_name) { "@#{namespace.path}/my-app" } subject { described_class.new(project, user, params).execute } @@ -42,29 +42,35 @@ RSpec.describe Packages::Npm::CreatePackageService do it { expect(subject.name).to eq(package_name) } it { expect(subject.version).to eq(version) } + + context 'with build info' do + let(:job) { create(:ci_build, user: user) } + let(:params) { super().merge(build: job) } + + it_behaves_like 'assigns build to package' + it_behaves_like 'assigns status to package' + + it 'creates a package file build info' do + expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1) + end + end end describe '#execute' do context 'scoped package' do it_behaves_like 'valid package' + end - context 'with build info' do - let(:job) { create(:ci_build, user: user) } - 
let(:params) { super().merge(build: job) } - - it_behaves_like 'assigns build to package' - it_behaves_like 'assigns status to package' + context 'scoped package not following the naming convention' do + let(:package_name) { '@any-scope/package' } - it 'creates a package file build info' do - expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1) - end - end + it_behaves_like 'valid package' end - context 'invalid package name' do - let(:package_name) { "@#{namespace.path}/my-group/my-app".freeze } + context 'unscoped package' do + let(:package_name) { 'unscoped-package' } - it { expect { subject }.to raise_error(ActiveRecord::RecordInvalid) } + it_behaves_like 'valid package' end context 'package already exists' do @@ -84,11 +90,18 @@ RSpec.describe Packages::Npm::CreatePackageService do it { expect(subject[:message]).to be 'File is too large.' } end - context 'with incorrect namespace' do - let(:package_name) { '@my_other_namespace/my-app' } - - it 'raises a RecordInvalid error' do - expect { subject }.to raise_error(ActiveRecord::RecordInvalid) + [ + '@inv@lid_scope/package', + '@scope/sub/group', + '@scope/../../package', + '@scope%2e%2e%2fpackage' + ].each do |invalid_package_name| + context "with invalid name #{invalid_package_name}" do + let(:package_name) { invalid_package_name } + + it 'raises a RecordInvalid error' do + expect { subject }.to raise_error(ActiveRecord::RecordInvalid) + end end end diff --git a/spec/services/packages/nuget/create_package_service_spec.rb b/spec/services/packages/nuget/create_package_service_spec.rb deleted file mode 100644 index e338ac36fc3..00000000000 --- a/spec/services/packages/nuget/create_package_service_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Packages::Nuget::CreatePackageService do - let_it_be(:project) { create(:project) } - let_it_be(:user) { create(:user) } - let_it_be(:params) { {} } - - describe '#execute' do - subject { 
described_class.new(project, user, params).execute } - - let(:package) { Packages::Package.last } - - it 'creates the package' do - expect { subject }.to change { Packages::Package.count }.by(1) - - expect(package).to be_valid - expect(package.name).to eq(Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) - expect(package.version).to start_with(Packages::Nuget::CreatePackageService::PACKAGE_VERSION) - expect(package.package_type).to eq('nuget') - end - - it 'can create two packages in a row' do - expect { subject }.to change { Packages::Package.count }.by(1) - expect { described_class.new(project, user, params).execute }.to change { Packages::Package.count }.by(1) - - expect(package).to be_valid - expect(package.name).to eq(Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) - expect(package.version).to start_with(Packages::Nuget::CreatePackageService::PACKAGE_VERSION) - expect(package.package_type).to eq('nuget') - end - - it_behaves_like 'assigns the package creator' - it_behaves_like 'assigns build to package' - it_behaves_like 'assigns status to package' - end -end diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb index 92b493ed376..c1cce46a54c 100644 --- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb +++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_redis_shared_state do include ExclusiveLeaseHelpers - let(:package) { create(:nuget_package) } + let(:package) { create(:nuget_package, :processing) } let(:package_file) { package.package_files.first } let(:service) { described_class.new(package_file) } let(:package_name) { 'DummyProject.DummyPackage' } @@ -60,6 +60,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_ .to 
change { ::Packages::Package.count }.by(0) .and change { Packages::DependencyLink.count }.by(0) expect(package_file.reload.file_name).not_to eq(package_file_name) + expect(package_file.package).to be_processing expect(package_file.package.reload.name).not_to eq(package_name) expect(package_file.package.version).not_to eq(package_version) end @@ -78,6 +79,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_ expect(package.reload.name).to eq(package_name) expect(package.version).to eq(package_version) + expect(package).to be_default expect(package_file.reload.file_name).to eq(package_file_name) # hard reset needed to properly reload package_file.file expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0 @@ -184,6 +186,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_ expect(package.reload.name).to eq(package_name) expect(package.version).to eq(package_version) + expect(package).to be_default expect(package_file.reload.file_name).to eq(package_file_name) # hard reset needed to properly reload package_file.file expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0 diff --git a/spec/services/pages/legacy_storage_lease_spec.rb b/spec/services/pages/legacy_storage_lease_spec.rb index c022da6f47f..092dce093ff 100644 --- a/spec/services/pages/legacy_storage_lease_spec.rb +++ b/spec/services/pages/legacy_storage_lease_spec.rb @@ -47,14 +47,6 @@ RSpec.describe ::Pages::LegacyStorageLease do expect(service.execute).to eq(nil) end - - it 'runs guarded method if feature flag is disabled' do - stub_feature_flags(pages_use_legacy_storage_lease: false) - - expect(service).to receive(:execute_unsafe).and_call_original - - expect(service.execute).to eq(true) - end end context 'when another service holds the lease for the different project' do diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb index 
f7da6f75141..306d87eefb8 100644 --- a/spec/services/projects/create_service_spec.rb +++ b/spec/services/projects/create_service_spec.rb @@ -349,27 +349,38 @@ RSpec.describe Projects::CreateService, '#execute' do context 'default visibility level' do let(:group) { create(:group, :private) } - before do - stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL) - group.add_developer(user) + using RSpec::Parameterized::TableSyntax - opts.merge!( - visibility: 'private', - name: 'test', - namespace: group, - path: 'foo' - ) + where(:case_name, :group_level, :project_level) do + [ + ['in public group', Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL], + ['in internal group', Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::INTERNAL], + ['in private group', Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::PRIVATE] + ] end - it 'creates a private project' do - project = create_project(user, opts) + with_them do + before do + stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL) + group.add_developer(user) + group.update!(visibility_level: group_level) - expect(project).to respond_to(:errors) + opts.merge!( + name: 'test', + namespace: group, + path: 'foo' + ) + end - expect(project.errors.any?).to be(false) - expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) - expect(project.saved?).to be(true) - expect(project.valid?).to be(true) + it 'creates project with correct visibility level', :aggregate_failures do + project = create_project(user, opts) + + expect(project).to respond_to(:errors) + expect(project.errors).to be_blank + expect(project.visibility_level).to eq(project_level) + expect(project).to be_saved + expect(project).to be_valid + end end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 64c1479a412..2c745a0a70c 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -249,9 +249,6 @@ RSpec.configure do |config| 
unstub_all_feature_flags end - # Enable Marginalia feature for all specs in the test suite. - Gitlab::Marginalia.enabled = true - # Stub these calls due to being expensive operations # It can be reenabled for specific tests via: # diff --git a/spec/support/shared_contexts/project_service_jira_context.rb b/spec/support/shared_contexts/project_service_jira_context.rb index 8e01de70846..54bb9fd108e 100644 --- a/spec/support/shared_contexts/project_service_jira_context.rb +++ b/spec/support/shared_contexts/project_service_jira_context.rb @@ -1,8 +1,8 @@ # frozen_string_literal: true RSpec.shared_context 'project service Jira context' do - let(:url) { 'http://jira.example.com' } - let(:test_url) { 'http://jira.example.com/rest/api/2/serverInfo' } + let(:url) { 'https://jira.example.com' } + let(:test_url) { 'https://jira.example.com/rest/api/2/serverInfo' } def fill_form(disable: false) click_active_checkbox if disable @@ -10,6 +10,5 @@ RSpec.shared_context 'project service Jira context' do fill_in 'service_url', with: url fill_in 'service_username', with: 'username' fill_in 'service_password', with: 'password' - fill_in 'service_jira_issue_transition_id', with: '25' end end diff --git a/spec/support/shared_contexts/project_service_shared_context.rb b/spec/support/shared_contexts/project_service_shared_context.rb index b4b9ab456e0..a8e75c624e8 100644 --- a/spec/support/shared_contexts/project_service_shared_context.rb +++ b/spec/support/shared_contexts/project_service_shared_context.rb @@ -15,7 +15,10 @@ RSpec.shared_context 'project service activation' do def visit_project_integration(name) visit_project_integrations - click_link(name) + + within('#content-body') do + click_link(name) + end end def click_active_checkbox diff --git a/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb index 60a29d78084..815108be447 100644 --- 
a/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb +++ b/spec/support/shared_contexts/requests/api/npm_packages_shared_context.rb @@ -5,8 +5,9 @@ RSpec.shared_context 'npm api setup' do include HttpBasicAuthHelpers let_it_be(:user, reload: true) { create(:user) } - let_it_be(:group) { create(:group) } - let_it_be(:project, reload: true) { create(:project, :public, namespace: group) } + let_it_be(:group) { create(:group, name: 'test-group') } + let_it_be(:namespace) { group } + let_it_be(:project, reload: true) { create(:project, :public, namespace: namespace) } let_it_be(:package, reload: true) { create(:npm_package, project: project, name: "@#{group.path}/scoped_package") } let_it_be(:token) { create(:oauth_access_token, scopes: 'api', resource_owner: user) } let_it_be(:personal_access_token) { create(:personal_access_token, user: user) } @@ -22,6 +23,10 @@ RSpec.shared_context 'set package name from package name type' do case package_name_type when :scoped_naming_convention "@#{group.path}/scoped-package" + when :scoped_no_naming_convention + '@any-scope/scoped-package' + when :unscoped + 'unscoped-package' when :non_existing 'non-existing-package' end diff --git a/spec/support/shared_contexts/security_and_compliance_permissions_shared_context.rb b/spec/support/shared_contexts/security_and_compliance_permissions_shared_context.rb new file mode 100644 index 00000000000..dc5195e4b01 --- /dev/null +++ b/spec/support/shared_contexts/security_and_compliance_permissions_shared_context.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +RSpec.shared_context '"Security & Compliance" permissions' do + let(:project_instance) { an_instance_of(Project) } + let(:user_instance) { an_instance_of(User) } + let(:before_request_defined) { false } + let(:valid_request) {} + + def self.before_request(&block) + return unless block + + let(:before_request_call) { instance_exec(&block) } + let(:before_request_defined) { true } + end + + before do + 
allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(user_instance, :access_security_and_compliance, project_instance).and_return(true) + end + + context 'when the "Security & Compliance" feature is disabled' do + subject { response } + + before do + before_request_call if before_request_defined + + allow(Ability).to receive(:allowed?).with(user_instance, :access_security_and_compliance, project_instance).and_return(false) + valid_request + end + + it { is_expected.to have_gitlab_http_status(:not_found) } + end +end diff --git a/spec/support/shared_examples/controllers/create_notes_rate_limit_shared_examples.rb b/spec/support/shared_examples/controllers/create_notes_rate_limit_shared_examples.rb new file mode 100644 index 00000000000..74a98c20383 --- /dev/null +++ b/spec/support/shared_examples/controllers/create_notes_rate_limit_shared_examples.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true +# +# Requires a context containing: +# - user +# - params +# - request_full_path + +RSpec.shared_examples 'request exceeding rate limit' do + before do + stub_application_setting(notes_create_limit: 2) + 2.times { post :create, params: params } + end + + it 'prevents from creating more notes', :request_store do + expect { post :create, params: params } + .to change { Note.count }.by(0) + + expect(response).to have_gitlab_http_status(:too_many_requests) + expect(response.body).to eq(_('This endpoint has been requested too many times. 
Try again later.')) + end + + it 'logs the event in auth.log' do + attributes = { + message: 'Application_Rate_Limiter_Request', + env: :notes_create_request_limit, + remote_ip: '0.0.0.0', + request_method: 'POST', + path: request_full_path, + user_id: user.id, + username: user.username + } + + expect(Gitlab::AuthLogger).to receive(:error).with(attributes).once + post :create, params: params + end + + it 'allows user in allow-list to create notes, even if the case is different' do + user.update_attribute(:username, user.username.titleize) + stub_application_setting(notes_create_limit_allowlist: ["#{user.username.downcase}"]) + + post :create, params: params + expect(response).to have_gitlab_http_status(:found) + end +end diff --git a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb index 25203fa3182..4411c91d479 100644 --- a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb +++ b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb @@ -3,7 +3,13 @@ RSpec.shared_examples 'it uploads and commit a new text file' do it 'uploads and commit a new text file', :js do find('.add-to-tree').click - click_link('Upload file') + + page.within('.dropdown-menu') do + click_link('Upload file') + + wait_for_requests + end + drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt')) page.within('#modal-upload-blob') do @@ -29,7 +35,13 @@ end RSpec.shared_examples 'it uploads and commit a new image file' do it 'uploads and commit a new image file', :js do find('.add-to-tree').click - click_link('Upload file') + + page.within('.dropdown-menu') do + click_link('Upload file') + + wait_for_requests + end + drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg')) page.within('#modal-upload-blob') do diff --git 
a/spec/support/shared_examples/helpers/issuable_description_templates_shared_examples.rb b/spec/support/shared_examples/helpers/issuable_description_templates_shared_examples.rb index 9e8c96d576a..47e34b21036 100644 --- a/spec/support/shared_examples/helpers/issuable_description_templates_shared_examples.rb +++ b/spec/support/shared_examples/helpers/issuable_description_templates_shared_examples.rb @@ -23,11 +23,11 @@ RSpec.shared_examples 'project issuable templates' do end it 'returns only md files as issue templates' do - expect(helper.issuable_templates(project, 'issue')).to eq(templates('issue', project)) + expect(helper.issuable_templates(project, 'issue')).to eq(expected_templates('issue')) end it 'returns only md files as merge_request templates' do - expect(helper.issuable_templates(project, 'merge_request')).to eq(templates('merge_request', project)) + expect(helper.issuable_templates(project, 'merge_request')).to eq(expected_templates('merge_request')) end end diff --git a/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb index a1867e1ce39..e31c39d7e1b 100644 --- a/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb +++ b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb @@ -7,7 +7,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name| let(:webhook_url) { 'https://example.gitlab.com' } def execute_with_options(options) - receive(:new).with(webhook_url, options.merge(http_client: SlackService::Notifier::HTTPClient)) + receive(:new).with(webhook_url, options.merge(http_client: SlackMattermost::Notifier::HTTPClient)) .and_return(double(:slack_service).as_null_object) end diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb index 
be051dcbb7b..c15c59e1a1d 100644 --- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb @@ -45,136 +45,234 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project| end end - where(:auth, :package_name_type, :request_forward, :visibility, :user_role, :expected_result, :expected_status) do - nil | :scoped_naming_convention | true | 'PUBLIC' | nil | :accept | :ok - nil | :scoped_naming_convention | false | 'PUBLIC' | nil | :accept | :ok - nil | :non_existing | true | 'PUBLIC' | nil | :redirect | :redirected - nil | :non_existing | false | 'PUBLIC' | nil | :reject | :not_found - nil | :scoped_naming_convention | true | 'PRIVATE' | nil | :reject | :not_found - nil | :scoped_naming_convention | false | 'PRIVATE' | nil | :reject | :not_found - nil | :non_existing | true | 'PRIVATE' | nil | :redirect | :redirected - nil | :non_existing | false | 'PRIVATE' | nil | :reject | :not_found - nil | :scoped_naming_convention | true | 'INTERNAL' | nil | :reject | :not_found - nil | :scoped_naming_convention | false | 'INTERNAL' | nil | :reject | :not_found - nil | :non_existing | true | 'INTERNAL' | nil | :redirect | :redirected - nil | :non_existing | false | 'INTERNAL' | nil | :reject | :not_found - - :oauth | :scoped_naming_convention | true | 'PUBLIC' | :guest | :accept | :ok - :oauth | :scoped_naming_convention | true | 'PUBLIC' | :reporter | :accept | :ok - :oauth | :scoped_naming_convention | false | 'PUBLIC' | :guest | :accept | :ok - :oauth | :scoped_naming_convention | false | 'PUBLIC' | :reporter | :accept | :ok - :oauth | :non_existing | true | 'PUBLIC' | :guest | :redirect | :redirected - :oauth | :non_existing | true | 'PUBLIC' | :reporter | :redirect | :redirected - :oauth | :non_existing | false | 'PUBLIC' | :guest | :reject | :not_found - :oauth | :non_existing | false | 'PUBLIC' | :reporter | :reject | :not_found - :oauth | 
:scoped_naming_convention | true | 'PRIVATE' | :guest | :reject | :forbidden - :oauth | :scoped_naming_convention | true | 'PRIVATE' | :reporter | :accept | :ok - :oauth | :scoped_naming_convention | false | 'PRIVATE' | :guest | :reject | :forbidden - :oauth | :scoped_naming_convention | false | 'PRIVATE' | :reporter | :accept | :ok - :oauth | :non_existing | true | 'PRIVATE' | :guest | :redirect | :redirected - :oauth | :non_existing | true | 'PRIVATE' | :reporter | :redirect | :redirected - :oauth | :non_existing | false | 'PRIVATE' | :guest | :reject | :forbidden - :oauth | :non_existing | false | 'PRIVATE' | :reporter | :reject | :not_found - :oauth | :scoped_naming_convention | true | 'INTERNAL' | :guest | :accept | :ok - :oauth | :scoped_naming_convention | true | 'INTERNAL' | :reporter | :accept | :ok - :oauth | :scoped_naming_convention | false | 'INTERNAL' | :guest | :accept | :ok - :oauth | :scoped_naming_convention | false | 'INTERNAL' | :reporter | :accept | :ok - :oauth | :non_existing | true | 'INTERNAL' | :guest | :redirect | :redirected - :oauth | :non_existing | true | 'INTERNAL' | :reporter | :redirect | :redirected - :oauth | :non_existing | false | 'INTERNAL' | :guest | :reject | :not_found - :oauth | :non_existing | false | 'INTERNAL' | :reporter | :reject | :not_found - - :personal_access_token | :scoped_naming_convention | true | 'PUBLIC' | :guest | :accept | :ok - :personal_access_token | :scoped_naming_convention | true | 'PUBLIC' | :reporter | :accept | :ok - :personal_access_token | :scoped_naming_convention | false | 'PUBLIC' | :guest | :accept | :ok - :personal_access_token | :scoped_naming_convention | false | 'PUBLIC' | :reporter | :accept | :ok - :personal_access_token | :non_existing | true | 'PUBLIC' | :guest | :redirect | :redirected - :personal_access_token | :non_existing | true | 'PUBLIC' | :reporter | :redirect | :redirected - :personal_access_token | :non_existing | false | 'PUBLIC' | :guest | :reject | :not_found - 
:personal_access_token | :non_existing | false | 'PUBLIC' | :reporter | :reject | :not_found - :personal_access_token | :scoped_naming_convention | true | 'PRIVATE' | :guest | :reject | :forbidden - :personal_access_token | :scoped_naming_convention | true | 'PRIVATE' | :reporter | :accept | :ok - :personal_access_token | :scoped_naming_convention | false | 'PRIVATE' | :guest | :reject | :forbidden - :personal_access_token | :scoped_naming_convention | false | 'PRIVATE' | :reporter | :accept | :ok - :personal_access_token | :non_existing | true | 'PRIVATE' | :guest | :redirect | :redirected - :personal_access_token | :non_existing | true | 'PRIVATE' | :reporter | :redirect | :redirected - :personal_access_token | :non_existing | false | 'PRIVATE' | :guest | :reject | :forbidden - :personal_access_token | :non_existing | false | 'PRIVATE' | :reporter | :reject | :not_found - :personal_access_token | :scoped_naming_convention | true | 'INTERNAL' | :guest | :accept | :ok - :personal_access_token | :scoped_naming_convention | true | 'INTERNAL' | :reporter | :accept | :ok - :personal_access_token | :scoped_naming_convention | false | 'INTERNAL' | :guest | :accept | :ok - :personal_access_token | :scoped_naming_convention | false | 'INTERNAL' | :reporter | :accept | :ok - :personal_access_token | :non_existing | true | 'INTERNAL' | :guest | :redirect | :redirected - :personal_access_token | :non_existing | true | 'INTERNAL' | :reporter | :redirect | :redirected - :personal_access_token | :non_existing | false | 'INTERNAL' | :guest | :reject | :not_found - :personal_access_token | :non_existing | false | 'INTERNAL' | :reporter | :reject | :not_found - - :job_token | :scoped_naming_convention | true | 'PUBLIC' | :developer | :accept | :ok - :job_token | :scoped_naming_convention | false | 'PUBLIC' | :developer | :accept | :ok - :job_token | :non_existing | true | 'PUBLIC' | :developer | :redirect | :redirected - :job_token | :non_existing | false | 'PUBLIC' | :developer | 
:reject | :not_found - :job_token | :scoped_naming_convention | true | 'PRIVATE' | :developer | :accept | :ok - :job_token | :scoped_naming_convention | false | 'PRIVATE' | :developer | :accept | :ok - :job_token | :non_existing | true | 'PRIVATE' | :developer | :redirect | :redirected - :job_token | :non_existing | false | 'PRIVATE' | :developer | :reject | :not_found - :job_token | :scoped_naming_convention | true | 'INTERNAL' | :developer | :accept | :ok - :job_token | :scoped_naming_convention | false | 'INTERNAL' | :developer | :accept | :ok - :job_token | :non_existing | true | 'INTERNAL' | :developer | :redirect | :redirected - :job_token | :non_existing | false | 'INTERNAL' | :developer | :reject | :not_found - - :deploy_token | :scoped_naming_convention | true | 'PUBLIC' | nil | :accept | :ok - :deploy_token | :scoped_naming_convention | false | 'PUBLIC' | nil | :accept | :ok - :deploy_token | :non_existing | true | 'PUBLIC' | nil | :redirect | :redirected - :deploy_token | :non_existing | false | 'PUBLIC' | nil | :reject | :not_found - :deploy_token | :scoped_naming_convention | true | 'PRIVATE' | nil | :accept | :ok - :deploy_token | :scoped_naming_convention | false | 'PRIVATE' | nil | :accept | :ok - :deploy_token | :non_existing | true | 'PRIVATE' | nil | :redirect | :redirected - :deploy_token | :non_existing | false | 'PRIVATE' | nil | :reject | :not_found - :deploy_token | :scoped_naming_convention | true | 'INTERNAL' | nil | :accept | :ok - :deploy_token | :scoped_naming_convention | false | 'INTERNAL' | nil | :accept | :ok - :deploy_token | :non_existing | true | 'INTERNAL' | nil | :redirect | :redirected - :deploy_token | :non_existing | false | 'INTERNAL' | nil | :reject | :not_found - end + shared_examples 'handling all conditions' do + where(:auth, :package_name_type, :request_forward, :visibility, :user_role, :expected_result, :expected_status) do + nil | :scoped_naming_convention | true | :public | nil | :accept | :ok + nil | 
:scoped_naming_convention | false | :public | nil | :accept | :ok + nil | :scoped_no_naming_convention | true | :public | nil | :accept | :ok + nil | :scoped_no_naming_convention | false | :public | nil | :accept | :ok + nil | :unscoped | true | :public | nil | :accept | :ok + nil | :unscoped | false | :public | nil | :accept | :ok + nil | :non_existing | true | :public | nil | :redirect | :redirected + nil | :non_existing | false | :public | nil | :reject | :not_found + nil | :scoped_naming_convention | true | :private | nil | :reject | :not_found + nil | :scoped_naming_convention | false | :private | nil | :reject | :not_found + nil | :scoped_no_naming_convention | true | :private | nil | :reject | :not_found + nil | :scoped_no_naming_convention | false | :private | nil | :reject | :not_found + nil | :unscoped | true | :private | nil | :reject | :not_found + nil | :unscoped | false | :private | nil | :reject | :not_found + nil | :non_existing | true | :private | nil | :redirect | :redirected + nil | :non_existing | false | :private | nil | :reject | :not_found + nil | :scoped_naming_convention | true | :internal | nil | :reject | :not_found + nil | :scoped_naming_convention | false | :internal | nil | :reject | :not_found + nil | :scoped_no_naming_convention | true | :internal | nil | :reject | :not_found + nil | :scoped_no_naming_convention | false | :internal | nil | :reject | :not_found + nil | :unscoped | true | :internal | nil | :reject | :not_found + nil | :unscoped | false | :internal | nil | :reject | :not_found + nil | :non_existing | true | :internal | nil | :redirect | :redirected + nil | :non_existing | false | :internal | nil | :reject | :not_found + + :oauth | :scoped_naming_convention | true | :public | :guest | :accept | :ok + :oauth | :scoped_naming_convention | true | :public | :reporter | :accept | :ok + :oauth | :scoped_naming_convention | false | :public | :guest | :accept | :ok + :oauth | :scoped_naming_convention | false | :public | 
:reporter | :accept | :ok + :oauth | :scoped_no_naming_convention | true | :public | :guest | :accept | :ok + :oauth | :scoped_no_naming_convention | true | :public | :reporter | :accept | :ok + :oauth | :scoped_no_naming_convention | false | :public | :guest | :accept | :ok + :oauth | :scoped_no_naming_convention | false | :public | :reporter | :accept | :ok + :oauth | :unscoped | true | :public | :guest | :accept | :ok + :oauth | :unscoped | true | :public | :reporter | :accept | :ok + :oauth | :unscoped | false | :public | :guest | :accept | :ok + :oauth | :unscoped | false | :public | :reporter | :accept | :ok + :oauth | :non_existing | true | :public | :guest | :redirect | :redirected + :oauth | :non_existing | true | :public | :reporter | :redirect | :redirected + :oauth | :non_existing | false | :public | :guest | :reject | :not_found + :oauth | :non_existing | false | :public | :reporter | :reject | :not_found + :oauth | :scoped_naming_convention | true | :private | :guest | :reject | :forbidden + :oauth | :scoped_naming_convention | true | :private | :reporter | :accept | :ok + :oauth | :scoped_naming_convention | false | :private | :guest | :reject | :forbidden + :oauth | :scoped_naming_convention | false | :private | :reporter | :accept | :ok + :oauth | :scoped_no_naming_convention | true | :private | :guest | :reject | :forbidden + :oauth | :scoped_no_naming_convention | true | :private | :reporter | :accept | :ok + :oauth | :scoped_no_naming_convention | false | :private | :guest | :reject | :forbidden + :oauth | :scoped_no_naming_convention | false | :private | :reporter | :accept | :ok + :oauth | :unscoped | true | :private | :guest | :reject | :forbidden + :oauth | :unscoped | true | :private | :reporter | :accept | :ok + :oauth | :unscoped | false | :private | :guest | :reject | :forbidden + :oauth | :unscoped | false | :private | :reporter | :accept | :ok + :oauth | :non_existing | true | :private | :guest | :redirect | :redirected + :oauth | 
:non_existing | true | :private | :reporter | :redirect | :redirected + :oauth | :non_existing | false | :private | :guest | :reject | :forbidden + :oauth | :non_existing | false | :private | :reporter | :reject | :not_found + :oauth | :scoped_naming_convention | true | :internal | :guest | :accept | :ok + :oauth | :scoped_naming_convention | true | :internal | :reporter | :accept | :ok + :oauth | :scoped_naming_convention | false | :internal | :guest | :accept | :ok + :oauth | :scoped_naming_convention | false | :internal | :reporter | :accept | :ok + :oauth | :scoped_no_naming_convention | true | :internal | :guest | :accept | :ok + :oauth | :scoped_no_naming_convention | true | :internal | :reporter | :accept | :ok + :oauth | :scoped_no_naming_convention | false | :internal | :guest | :accept | :ok + :oauth | :scoped_no_naming_convention | false | :internal | :reporter | :accept | :ok + :oauth | :unscoped | true | :internal | :guest | :accept | :ok + :oauth | :unscoped | true | :internal | :reporter | :accept | :ok + :oauth | :unscoped | false | :internal | :guest | :accept | :ok + :oauth | :unscoped | false | :internal | :reporter | :accept | :ok + :oauth | :non_existing | true | :internal | :guest | :redirect | :redirected + :oauth | :non_existing | true | :internal | :reporter | :redirect | :redirected + :oauth | :non_existing | false | :internal | :guest | :reject | :not_found + :oauth | :non_existing | false | :internal | :reporter | :reject | :not_found + + :personal_access_token | :scoped_naming_convention | true | :public | :guest | :accept | :ok + :personal_access_token | :scoped_naming_convention | true | :public | :reporter | :accept | :ok + :personal_access_token | :scoped_naming_convention | false | :public | :guest | :accept | :ok + :personal_access_token | :scoped_naming_convention | false | :public | :reporter | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | true | :public | :guest | :accept | :ok + :personal_access_token 
| :scoped_no_naming_convention | true | :public | :reporter | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | false | :public | :guest | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | false | :public | :reporter | :accept | :ok + :personal_access_token | :unscoped | true | :public | :guest | :accept | :ok + :personal_access_token | :unscoped | true | :public | :reporter | :accept | :ok + :personal_access_token | :unscoped | false | :public | :guest | :accept | :ok + :personal_access_token | :unscoped | false | :public | :reporter | :accept | :ok + :personal_access_token | :non_existing | true | :public | :guest | :redirect | :redirected + :personal_access_token | :non_existing | true | :public | :reporter | :redirect | :redirected + :personal_access_token | :non_existing | false | :public | :guest | :reject | :not_found + :personal_access_token | :non_existing | false | :public | :reporter | :reject | :not_found + :personal_access_token | :scoped_naming_convention | true | :private | :guest | :reject | :forbidden + :personal_access_token | :scoped_naming_convention | true | :private | :reporter | :accept | :ok + :personal_access_token | :scoped_naming_convention | false | :private | :guest | :reject | :forbidden + :personal_access_token | :scoped_naming_convention | false | :private | :reporter | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | true | :private | :guest | :reject | :forbidden + :personal_access_token | :scoped_no_naming_convention | true | :private | :reporter | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | false | :private | :guest | :reject | :forbidden + :personal_access_token | :scoped_no_naming_convention | false | :private | :reporter | :accept | :ok + :personal_access_token | :unscoped | true | :private | :guest | :reject | :forbidden + :personal_access_token | :unscoped | true | :private | :reporter | :accept | :ok + :personal_access_token | 
:unscoped | false | :private | :guest | :reject | :forbidden + :personal_access_token | :unscoped | false | :private | :reporter | :accept | :ok + :personal_access_token | :non_existing | true | :private | :guest | :redirect | :redirected + :personal_access_token | :non_existing | true | :private | :reporter | :redirect | :redirected + :personal_access_token | :non_existing | false | :private | :guest | :reject | :forbidden + :personal_access_token | :non_existing | false | :private | :reporter | :reject | :not_found + :personal_access_token | :scoped_naming_convention | true | :internal | :guest | :accept | :ok + :personal_access_token | :scoped_naming_convention | true | :internal | :reporter | :accept | :ok + :personal_access_token | :scoped_naming_convention | false | :internal | :guest | :accept | :ok + :personal_access_token | :scoped_naming_convention | false | :internal | :reporter | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | true | :internal | :guest | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | true | :internal | :reporter | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | false | :internal | :guest | :accept | :ok + :personal_access_token | :scoped_no_naming_convention | false | :internal | :reporter | :accept | :ok + :personal_access_token | :unscoped | true | :internal | :guest | :accept | :ok + :personal_access_token | :unscoped | true | :internal | :reporter | :accept | :ok + :personal_access_token | :unscoped | false | :internal | :guest | :accept | :ok + :personal_access_token | :unscoped | false | :internal | :reporter | :accept | :ok + :personal_access_token | :non_existing | true | :internal | :guest | :redirect | :redirected + :personal_access_token | :non_existing | true | :internal | :reporter | :redirect | :redirected + :personal_access_token | :non_existing | false | :internal | :guest | :reject | :not_found + :personal_access_token | :non_existing | false | 
:internal | :reporter | :reject | :not_found + + :job_token | :scoped_naming_convention | true | :public | :developer | :accept | :ok + :job_token | :scoped_naming_convention | false | :public | :developer | :accept | :ok + :job_token | :scoped_no_naming_convention | true | :public | :developer | :accept | :ok + :job_token | :scoped_no_naming_convention | false | :public | :developer | :accept | :ok + :job_token | :unscoped | true | :public | :developer | :accept | :ok + :job_token | :unscoped | false | :public | :developer | :accept | :ok + :job_token | :non_existing | true | :public | :developer | :redirect | :redirected + :job_token | :non_existing | false | :public | :developer | :reject | :not_found + :job_token | :scoped_naming_convention | true | :private | :developer | :accept | :ok + :job_token | :scoped_naming_convention | false | :private | :developer | :accept | :ok + :job_token | :scoped_no_naming_convention | true | :private | :developer | :accept | :ok + :job_token | :scoped_no_naming_convention | false | :private | :developer | :accept | :ok + :job_token | :unscoped | true | :private | :developer | :accept | :ok + :job_token | :unscoped | false | :private | :developer | :accept | :ok + :job_token | :non_existing | true | :private | :developer | :redirect | :redirected + :job_token | :non_existing | false | :private | :developer | :reject | :not_found + :job_token | :scoped_naming_convention | true | :internal | :developer | :accept | :ok + :job_token | :scoped_naming_convention | false | :internal | :developer | :accept | :ok + :job_token | :scoped_no_naming_convention | true | :internal | :developer | :accept | :ok + :job_token | :scoped_no_naming_convention | false | :internal | :developer | :accept | :ok + :job_token | :unscoped | true | :internal | :developer | :accept | :ok + :job_token | :unscoped | false | :internal | :developer | :accept | :ok + :job_token | :non_existing | true | :internal | :developer | :redirect | :redirected + :job_token 
| :non_existing | false | :internal | :developer | :reject | :not_found + + :deploy_token | :scoped_naming_convention | true | :public | nil | :accept | :ok + :deploy_token | :scoped_naming_convention | false | :public | nil | :accept | :ok + :deploy_token | :scoped_no_naming_convention | true | :public | nil | :accept | :ok + :deploy_token | :scoped_no_naming_convention | false | :public | nil | :accept | :ok + :deploy_token | :unscoped | true | :public | nil | :accept | :ok + :deploy_token | :unscoped | false | :public | nil | :accept | :ok + :deploy_token | :non_existing | true | :public | nil | :redirect | :redirected + :deploy_token | :non_existing | false | :public | nil | :reject | :not_found + :deploy_token | :scoped_naming_convention | true | :private | nil | :accept | :ok + :deploy_token | :scoped_naming_convention | false | :private | nil | :accept | :ok + :deploy_token | :scoped_no_naming_convention | true | :private | nil | :accept | :ok + :deploy_token | :scoped_no_naming_convention | false | :private | nil | :accept | :ok + :deploy_token | :unscoped | true | :private | nil | :accept | :ok + :deploy_token | :unscoped | false | :private | nil | :accept | :ok + :deploy_token | :non_existing | true | :private | nil | :redirect | :redirected + :deploy_token | :non_existing | false | :private | nil | :reject | :not_found + :deploy_token | :scoped_naming_convention | true | :internal | nil | :accept | :ok + :deploy_token | :scoped_naming_convention | false | :internal | nil | :accept | :ok + :deploy_token | :scoped_no_naming_convention | true | :internal | nil | :accept | :ok + :deploy_token | :scoped_no_naming_convention | false | :internal | nil | :accept | :ok + :deploy_token | :unscoped | true | :internal | nil | :accept | :ok + :deploy_token | :unscoped | false | :internal | nil | :accept | :ok + :deploy_token | :non_existing | true | :internal | nil | :redirect | :redirected + :deploy_token | :non_existing | false | :internal | nil | :reject | 
:not_found + end - with_them do - include_context 'set package name from package name type' - - let(:headers) do - case auth - when :oauth - build_token_auth_header(token.token) - when :personal_access_token - build_token_auth_header(personal_access_token.token) - when :job_token - build_token_auth_header(job.token) - when :deploy_token - build_token_auth_header(deploy_token.token) - else - {} + with_them do + include_context 'set package name from package name type' + + let(:headers) do + case auth + when :oauth + build_token_auth_header(token.token) + when :personal_access_token + build_token_auth_header(personal_access_token.token) + when :job_token + build_token_auth_header(job.token) + when :deploy_token + build_token_auth_header(deploy_token.token) + else + {} + end end - end - before do - project.send("add_#{user_role}", user) if user_role - project.update!(visibility: Gitlab::VisibilityLevel.const_get(visibility, false)) - package.update!(name: package_name) unless package_name == 'non-existing-package' - stub_application_setting(npm_package_requests_forwarding: request_forward) - end + before do + project.send("add_#{user_role}", user) if user_role + project.update!(visibility: visibility.to_s) + package.update!(name: package_name) unless package_name == 'non-existing-package' + stub_application_setting(npm_package_requests_forwarding: request_forward) + end - example_name = "#{params[:expected_result]} metadata request" - status = params[:expected_status] + example_name = "#{params[:expected_result]} metadata request" + status = params[:expected_status] - if scope == :instance && params[:package_name_type] != :scoped_naming_convention - if params[:request_forward] - example_name = 'redirect metadata request' - status = :redirected - else - example_name = 'reject metadata request' - status = :not_found + if scope == :instance && params[:package_name_type] != :scoped_naming_convention + if params[:request_forward] + example_name = 'redirect metadata 
request' + status = :redirected + else + example_name = 'reject metadata request' + status = :not_found + end end + + it_behaves_like example_name, status: status end + end - it_behaves_like example_name, status: status + context 'with a group namespace' do + it_behaves_like 'handling all conditions' + end + + if scope != :project + context 'with a user namespace' do + let_it_be(:namespace) { user.namespace } + + it_behaves_like 'handling all conditions' + end end context 'with a developer' do @@ -225,26 +323,44 @@ RSpec.shared_examples 'handling get dist tags requests' do |scope: :project| shared_examples 'handling different package names, visibilities and user roles' do where(:package_name_type, :visibility, :user_role, :expected_result, :expected_status) do - :scoped_naming_convention | 'PUBLIC' | :anonymous | :accept | :ok - :scoped_naming_convention | 'PUBLIC' | :guest | :accept | :ok - :scoped_naming_convention | 'PUBLIC' | :reporter | :accept | :ok - :non_existing | 'PUBLIC' | :anonymous | :reject | :not_found - :non_existing | 'PUBLIC' | :guest | :reject | :not_found - :non_existing | 'PUBLIC' | :reporter | :reject | :not_found - - :scoped_naming_convention | 'PRIVATE' | :anonymous | :reject | :not_found - :scoped_naming_convention | 'PRIVATE' | :guest | :reject | :forbidden - :scoped_naming_convention | 'PRIVATE' | :reporter | :accept | :ok - :non_existing | 'PRIVATE' | :anonymous | :reject | :not_found - :non_existing | 'PRIVATE' | :guest | :reject | :forbidden - :non_existing | 'PRIVATE' | :reporter | :reject | :not_found - - :scoped_naming_convention | 'INTERNAL' | :anonymous | :reject | :not_found - :scoped_naming_convention | 'INTERNAL' | :guest | :accept | :ok - :scoped_naming_convention | 'INTERNAL' | :reporter | :accept | :ok - :non_existing | 'INTERNAL' | :anonymous | :reject | :not_found - :non_existing | 'INTERNAL' | :guest | :reject | :not_found - :non_existing | 'INTERNAL' | :reporter | :reject | :not_found + :scoped_naming_convention | 
:public | :anonymous | :accept | :ok + :scoped_naming_convention | :public | :guest | :accept | :ok + :scoped_naming_convention | :public | :reporter | :accept | :ok + :scoped_no_naming_convention | :public | :anonymous | :accept | :ok + :scoped_no_naming_convention | :public | :guest | :accept | :ok + :scoped_no_naming_convention | :public | :reporter | :accept | :ok + :unscoped | :public | :anonymous | :accept | :ok + :unscoped | :public | :guest | :accept | :ok + :unscoped | :public | :reporter | :accept | :ok + :non_existing | :public | :anonymous | :reject | :not_found + :non_existing | :public | :guest | :reject | :not_found + :non_existing | :public | :reporter | :reject | :not_found + + :scoped_naming_convention | :private | :anonymous | :reject | :not_found + :scoped_naming_convention | :private | :guest | :reject | :forbidden + :scoped_naming_convention | :private | :reporter | :accept | :ok + :scoped_no_naming_convention | :private | :anonymous | :reject | :not_found + :scoped_no_naming_convention | :private | :guest | :reject | :forbidden + :scoped_no_naming_convention | :private | :reporter | :accept | :ok + :unscoped | :private | :anonymous | :reject | :not_found + :unscoped | :private | :guest | :reject | :forbidden + :unscoped | :private | :reporter | :accept | :ok + :non_existing | :private | :anonymous | :reject | :not_found + :non_existing | :private | :guest | :reject | :forbidden + :non_existing | :private | :reporter | :reject | :not_found + + :scoped_naming_convention | :internal | :anonymous | :reject | :not_found + :scoped_naming_convention | :internal | :guest | :accept | :ok + :scoped_naming_convention | :internal | :reporter | :accept | :ok + :scoped_no_naming_convention | :internal | :anonymous | :reject | :not_found + :scoped_no_naming_convention | :internal | :guest | :accept | :ok + :scoped_no_naming_convention | :internal | :reporter | :accept | :ok + :unscoped | :internal | :anonymous | :reject | :not_found + :unscoped | :internal 
| :guest | :accept | :ok + :unscoped | :internal | :reporter | :accept | :ok + :non_existing | :internal | :anonymous | :reject | :not_found + :non_existing | :internal | :guest | :reject | :not_found + :non_existing | :internal | :reporter | :reject | :not_found end with_them do @@ -254,7 +370,7 @@ RSpec.shared_examples 'handling get dist tags requests' do |scope: :project| before do project.send("add_#{user_role}", user) unless anonymous - project.update!(visibility: Gitlab::VisibilityLevel.const_get(visibility, false)) + project.update!(visibility: visibility.to_s) end example_name = "#{params[:expected_result]} package tags request" @@ -269,16 +385,30 @@ RSpec.shared_examples 'handling get dist tags requests' do |scope: :project| end end - context 'with oauth token' do - let(:headers) { build_token_auth_header(token.token) } + shared_examples 'handling all conditions' do + context 'with oauth token' do + let(:headers) { build_token_auth_header(token.token) } + + it_behaves_like 'handling different package names, visibilities and user roles' + end + + context 'with personal access token' do + let(:headers) { build_token_auth_header(personal_access_token.token) } - it_behaves_like 'handling different package names, visibilities and user roles' + it_behaves_like 'handling different package names, visibilities and user roles' + end end - context 'with personal access token' do - let(:headers) { build_token_auth_header(personal_access_token.token) } + context 'with a group namespace' do + it_behaves_like 'handling all conditions' + end - it_behaves_like 'handling different package names, visibilities and user roles' + if scope != :project + context 'with a user namespace' do + let_it_be(:namespace) { user.namespace } + + it_behaves_like 'handling all conditions' + end end end @@ -303,26 +433,44 @@ RSpec.shared_examples 'handling create dist tag requests' do |scope: :project| shared_examples 'handling different package names, visibilities and user roles' do 
where(:package_name_type, :visibility, :user_role, :expected_result, :expected_status) do - :scoped_naming_convention | 'PUBLIC' | :anonymous | :reject | :forbidden - :scoped_naming_convention | 'PUBLIC' | :guest | :reject | :forbidden - :scoped_naming_convention | 'PUBLIC' | :developer | :accept | :ok - :non_existing | 'PUBLIC' | :anonymous | :reject | :forbidden - :non_existing | 'PUBLIC' | :guest | :reject | :forbidden - :non_existing | 'PUBLIC' | :developer | :reject | :not_found - - :scoped_naming_convention | 'PRIVATE' | :anonymous | :reject | :not_found - :scoped_naming_convention | 'PRIVATE' | :guest | :reject | :forbidden - :scoped_naming_convention | 'PRIVATE' | :developer | :accept | :ok - :non_existing | 'PRIVATE' | :anonymous | :reject | :not_found - :non_existing | 'PRIVATE' | :guest | :reject | :forbidden - :non_existing | 'PRIVATE' | :developer | :reject | :not_found - - :scoped_naming_convention | 'INTERNAL' | :anonymous | :reject | :forbidden - :scoped_naming_convention | 'INTERNAL' | :guest | :reject | :forbidden - :scoped_naming_convention | 'INTERNAL' | :developer | :accept | :ok - :non_existing | 'INTERNAL' | :anonymous | :reject | :forbidden - :non_existing | 'INTERNAL' | :guest | :reject | :forbidden - :non_existing | 'INTERNAL' | :developer | :reject | :not_found + :scoped_naming_convention | :public | :anonymous | :reject | :forbidden + :scoped_naming_convention | :public | :guest | :reject | :forbidden + :scoped_naming_convention | :public | :developer | :accept | :ok + :scoped_no_naming_convention | :public | :anonymous | :reject | :forbidden + :scoped_no_naming_convention | :public | :guest | :reject | :forbidden + :scoped_no_naming_convention | :public | :developer | :accept | :ok + :unscoped | :public | :anonymous | :reject | :forbidden + :unscoped | :public | :guest | :reject | :forbidden + :unscoped | :public | :developer | :accept | :ok + :non_existing | :public | :anonymous | :reject | :forbidden + :non_existing | :public | :guest 
| :reject | :forbidden + :non_existing | :public | :developer | :reject | :not_found + + :scoped_naming_convention | :private | :anonymous | :reject | :not_found + :scoped_naming_convention | :private | :guest | :reject | :forbidden + :scoped_naming_convention | :private | :developer | :accept | :ok + :scoped_no_naming_convention | :private | :anonymous | :reject | :not_found + :scoped_no_naming_convention | :private | :guest | :reject | :forbidden + :scoped_no_naming_convention | :private | :developer | :accept | :ok + :unscoped | :private | :anonymous | :reject | :not_found + :unscoped | :private | :guest | :reject | :forbidden + :unscoped | :private | :developer | :accept | :ok + :non_existing | :private | :anonymous | :reject | :not_found + :non_existing | :private | :guest | :reject | :forbidden + :non_existing | :private | :developer | :reject | :not_found + + :scoped_naming_convention | :internal | :anonymous | :reject | :forbidden + :scoped_naming_convention | :internal | :guest | :reject | :forbidden + :scoped_naming_convention | :internal | :developer | :accept | :ok + :scoped_no_naming_convention | :internal | :anonymous | :reject | :forbidden + :scoped_no_naming_convention | :internal | :guest | :reject | :forbidden + :scoped_no_naming_convention | :internal | :developer | :accept | :ok + :unscoped | :internal | :anonymous | :reject | :forbidden + :unscoped | :internal | :guest | :reject | :forbidden + :unscoped | :internal | :developer | :accept | :ok + :non_existing | :internal | :anonymous | :reject | :forbidden + :non_existing | :internal | :guest | :reject | :forbidden + :non_existing | :internal | :developer | :reject | :not_found end with_them do @@ -332,7 +480,7 @@ RSpec.shared_examples 'handling create dist tag requests' do |scope: :project| before do project.send("add_#{user_role}", user) unless anonymous - project.update!(visibility: Gitlab::VisibilityLevel.const_get(visibility, false)) + project.update!(visibility: visibility.to_s) end 
example_name = "#{params[:expected_result]} create package tag request" @@ -347,16 +495,30 @@ RSpec.shared_examples 'handling create dist tag requests' do |scope: :project| end end - context 'with oauth token' do - let(:headers) { build_token_auth_header(token.token) } + shared_examples 'handling all conditions' do + context 'with oauth token' do + let(:headers) { build_token_auth_header(token.token) } - it_behaves_like 'handling different package names, visibilities and user roles' + it_behaves_like 'handling different package names, visibilities and user roles' + end + + context 'with personal access token' do + let(:headers) { build_token_auth_header(personal_access_token.token) } + + it_behaves_like 'handling different package names, visibilities and user roles' + end end - context 'with personal access token' do - let(:headers) { build_token_auth_header(personal_access_token.token) } + context 'with a group namespace' do + it_behaves_like 'handling all conditions' + end - it_behaves_like 'handling different package names, visibilities and user roles' + if scope != :project + context 'with a user namespace' do + let_it_be(:namespace) { user.namespace } + + it_behaves_like 'handling all conditions' + end end end @@ -379,19 +541,44 @@ RSpec.shared_examples 'handling delete dist tag requests' do |scope: :project| shared_examples 'handling different package names, visibilities and user roles' do where(:package_name_type, :visibility, :user_role, :expected_result, :expected_status) do - :scoped_naming_convention | 'PUBLIC' | :anonymous | :reject | :forbidden - :scoped_naming_convention | 'PUBLIC' | :guest | :reject | :forbidden - :scoped_naming_convention | 'PUBLIC' | :maintainer | :accept | :ok - :non_existing | 'PUBLIC' | :anonymous | :reject | :forbidden - :non_existing | 'PUBLIC' | :guest | :reject | :forbidden - :non_existing | 'PUBLIC' | :maintainer | :reject | :not_found - - :scoped_naming_convention | 'PRIVATE' | :anonymous | :reject | :not_found - 
:scoped_naming_convention | 'PRIVATE' | :guest | :reject | :forbidden - :scoped_naming_convention | 'PRIVATE' | :maintainer | :accept | :ok - :non_existing | 'INTERNAL' | :anonymous | :reject | :forbidden - :non_existing | 'INTERNAL' | :guest | :reject | :forbidden - :non_existing | 'INTERNAL' | :maintainer | :reject | :not_found + :scoped_naming_convention | :public | :anonymous | :reject | :forbidden + :scoped_naming_convention | :public | :guest | :reject | :forbidden + :scoped_naming_convention | :public | :maintainer | :accept | :ok + :scoped_no_naming_convention | :public | :anonymous | :reject | :forbidden + :scoped_no_naming_convention | :public | :guest | :reject | :forbidden + :scoped_no_naming_convention | :public | :maintainer | :accept | :ok + :unscoped | :public | :anonymous | :reject | :forbidden + :unscoped | :public | :guest | :reject | :forbidden + :unscoped | :public | :maintainer | :accept | :ok + :non_existing | :public | :anonymous | :reject | :forbidden + :non_existing | :public | :guest | :reject | :forbidden + :non_existing | :public | :maintainer | :reject | :not_found + + :scoped_naming_convention | :private | :anonymous | :reject | :not_found + :scoped_naming_convention | :private | :guest | :reject | :forbidden + :scoped_naming_convention | :private | :maintainer | :accept | :ok + :scoped_no_naming_convention | :private | :anonymous | :reject | :not_found + :scoped_no_naming_convention | :private | :guest | :reject | :forbidden + :scoped_no_naming_convention | :private | :maintainer | :accept | :ok + :unscoped | :private | :anonymous | :reject | :not_found + :unscoped | :private | :guest | :reject | :forbidden + :unscoped | :private | :maintainer | :accept | :ok + :non_existing | :private | :anonymous | :reject | :not_found + :non_existing | :private | :guest | :reject | :forbidden + :non_existing | :private | :maintainer | :reject | :not_found + + :scoped_naming_convention | :internal | :anonymous | :reject | :forbidden + 
:scoped_naming_convention | :internal | :guest | :reject | :forbidden + :scoped_naming_convention | :internal | :maintainer | :accept | :ok + :scoped_no_naming_convention | :internal | :anonymous | :reject | :forbidden + :scoped_no_naming_convention | :internal | :guest | :reject | :forbidden + :scoped_no_naming_convention | :internal | :maintainer | :accept | :ok + :unscoped | :internal | :anonymous | :reject | :forbidden + :unscoped | :internal | :guest | :reject | :forbidden + :unscoped | :internal | :maintainer | :accept | :ok + :non_existing | :internal | :anonymous | :reject | :forbidden + :non_existing | :internal | :guest | :reject | :forbidden + :non_existing | :internal | :maintainer | :reject | :not_found end with_them do @@ -401,7 +588,7 @@ RSpec.shared_examples 'handling delete dist tag requests' do |scope: :project| before do project.send("add_#{user_role}", user) unless anonymous - project.update!(visibility: Gitlab::VisibilityLevel.const_get(visibility, false)) + project.update!(visibility: visibility.to_s) end example_name = "#{params[:expected_result]} delete package tag request" @@ -416,15 +603,29 @@ RSpec.shared_examples 'handling delete dist tag requests' do |scope: :project| end end - context 'with oauth token' do - let(:headers) { build_token_auth_header(token.token) } + shared_examples 'handling all conditions' do + context 'with oauth token' do + let(:headers) { build_token_auth_header(token.token) } + + it_behaves_like 'handling different package names, visibilities and user roles' + end + + context 'with personal access token' do + let(:headers) { build_token_auth_header(personal_access_token.token) } - it_behaves_like 'handling different package names, visibilities and user roles' + it_behaves_like 'handling different package names, visibilities and user roles' + end end - context 'with personal access token' do - let(:headers) { build_token_auth_header(personal_access_token.token) } + context 'with a group namespace' do + 
it_behaves_like 'handling all conditions' + end - it_behaves_like 'handling different package names, visibilities and user roles' + if scope != :project + context 'with a user namespace' do + let_it_be(:namespace) { user.namespace } + + it_behaves_like 'handling all conditions' + end end end diff --git a/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb new file mode 100644 index 00000000000..2e8e970a019 --- /dev/null +++ b/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb @@ -0,0 +1,130 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'rejects rubygems packages access' do |user_type, status, add_member = true| + context "for user type #{user_type}" do + before do + project.send("add_#{user_type}", user) if add_member && user_type != :anonymous + end + + it_behaves_like 'returning response status', status + end +end + +RSpec.shared_examples 'process rubygems workhorse authorization' do |user_type, status, add_member = true| + context "for user type #{user_type}" do + before do + project.send("add_#{user_type}", user) if add_member && user_type != :anonymous + end + + it_behaves_like 'returning response status', status + + it 'has the proper content type' do + subject + + expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE) + end + + context 'with a request that bypassed gitlab-workhorse' do + let(:headers) do + { 'HTTP_AUTHORIZATION' => personal_access_token.token } + .merge(workhorse_headers) + .tap { |h| h.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER) } + end + + before do + project.add_maintainer(user) + end + + it_behaves_like 'returning response status', :forbidden + end + end +end + +RSpec.shared_examples 'process rubygems upload' do |user_type, status, add_member = true| + RSpec.shared_examples 'creates rubygems package files' do + it 'creates package files', 
:aggregate_failures do + expect { subject } + .to change { project.packages.count }.by(1) + .and change { Packages::PackageFile.count }.by(1) + expect(response).to have_gitlab_http_status(status) + + package_file = project.packages.last.package_files.reload.last + expect(package_file.file_name).to eq('package.gem') + end + end + + context "for user type #{user_type}" do + before do + project.send("add_#{user_type}", user) if add_member && user_type != :anonymous + end + + context 'with object storage disabled' do + before do + stub_package_file_object_storage(enabled: false) + end + + context 'without a file from workhorse' do + let(:send_rewritten_field) { false } + + it_behaves_like 'returning response status', :bad_request + end + + context 'with correct params' do + it_behaves_like 'package workhorse uploads' + it_behaves_like 'creates rubygems package files' + it_behaves_like 'a package tracking event', 'API::RubygemPackages', 'push_package' + end + end + + context 'with object storage enabled' do + let(:tmp_object) do + fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang + key: "tmp/uploads/#{file_name}", + body: 'content' + ) + end + + let(:fog_file) { fog_to_uploaded_file(tmp_object) } + let(:params) { { file: fog_file, 'file.remote_id' => file_name } } + + context 'and direct upload enabled' do + let(:fog_connection) do + stub_package_file_object_storage(direct_upload: true) + end + + it_behaves_like 'creates rubygems package files' + + ['123123', '../../123123'].each do |remote_id| + context "with invalid remote_id: #{remote_id}" do + let(:params) do + { + file: fog_file, + 'file.remote_id' => remote_id + } + end + + it_behaves_like 'returning response status', :forbidden + end + end + end + + context 'and direct upload disabled' do + context 'and background upload disabled' do + let(:fog_connection) do + stub_package_file_object_storage(direct_upload: false, background_upload: false) + end + + it_behaves_like 
'creates rubygems package files' + end + + context 'and background upload enabled' do + let(:fog_connection) do + stub_package_file_object_storage(direct_upload: false, background_upload: true) + end + + it_behaves_like 'creates rubygems package files' + end + end + end + end +end diff --git a/spec/support/shared_examples/service_desk_issue_templates_examples.rb b/spec/support/shared_examples/service_desk_issue_templates_examples.rb new file mode 100644 index 00000000000..fd9645df7a3 --- /dev/null +++ b/spec/support/shared_examples/service_desk_issue_templates_examples.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'issue description templates from current project only' do + it 'loads issue description templates from the project only' do + within('#service-desk-template-select') do + expect(page).to have_content('project-issue-bar') + expect(page).to have_content('project-issue-foo') + expect(page).not_to have_content('group-issue-bar') + expect(page).not_to have_content('group-issue-foo') + end + end +end diff --git a/spec/tooling/danger/helper_spec.rb b/spec/tooling/danger/helper_spec.rb index c338d138352..59af3ecd145 100644 --- a/spec/tooling/danger/helper_spec.rb +++ b/spec/tooling/danger/helper_spec.rb @@ -325,8 +325,6 @@ RSpec.describe Tooling::Danger::Helper do 'db/fixtures/foo.rb' | [:backend] 'ee/db/fixtures/foo.rb' | [:backend] - 'doc/api/graphql/reference/gitlab_schema.graphql' | [:backend] - 'doc/api/graphql/reference/gitlab_schema.json' | [:backend] 'qa/foo' | [:qa] 'ee/qa/foo' | [:qa] diff --git a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb index ef40829c29b..405e86ee89f 100644 --- a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb +++ b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb @@ -30,8 +30,8 @@ RSpec.describe 'admin/application_settings/_package_registry' do 
expect(rendered).to have_field('Maximum Maven package file size in bytes', type: 'number') expect(page.find_field('Maximum Maven package file size in bytes').value).to eq(default_plan_limits.maven_max_file_size.to_s) - expect(rendered).to have_field('Maximum NPM package file size in bytes', type: 'number') - expect(page.find_field('Maximum NPM package file size in bytes').value).to eq(default_plan_limits.npm_max_file_size.to_s) + expect(rendered).to have_field('Maximum npm package file size in bytes', type: 'number') + expect(page.find_field('Maximum npm package file size in bytes').value).to eq(default_plan_limits.npm_max_file_size.to_s) expect(rendered).to have_field('Maximum NuGet package file size in bytes', type: 'number') expect(page.find_field('Maximum NuGet package file size in bytes').value).to eq(default_plan_limits.nuget_max_file_size.to_s) diff --git a/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb b/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb index c7de8553d86..da0cbe37400 100644 --- a/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb +++ b/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb @@ -6,12 +6,12 @@ RSpec.describe Analytics::InstanceStatistics::CountJobTriggerWorker do it_behaves_like 'an idempotent worker' context 'triggers a job for each measurement identifiers' do - let(:expected_count) { Analytics::InstanceStatistics::Measurement.identifier_query_mapping.keys.size } + let(:expected_count) { Analytics::UsageTrends::Measurement.identifier_query_mapping.keys.size } it 'triggers CounterJobWorker jobs' do subject.perform - expect(Analytics::InstanceStatistics::CounterJobWorker.jobs.count).to eq(expected_count) + expect(Analytics::UsageTrends::CounterJobWorker.jobs.count).to eq(expected_count) end end end diff --git a/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb 
b/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb index 667ec0bcb75..4994fec44ab 100644 --- a/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb +++ b/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb @@ -6,7 +6,7 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do let_it_be(:user_1) { create(:user) } let_it_be(:user_2) { create(:user) } - let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) } + let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:users) } let(:recorded_at) { Time.zone.now } let(:job_args) { [users_measurement_identifier, user_1.id, user_2.id, recorded_at] } @@ -18,7 +18,7 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do it 'counts a scope and stores the result' do subject - measurement = Analytics::InstanceStatistics::Measurement.users.first + measurement = Analytics::UsageTrends::Measurement.users.first expect(measurement.recorded_at).to be_like_time(recorded_at) expect(measurement.identifier).to eq('users') expect(measurement.count).to eq(2) @@ -26,14 +26,14 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do end context 'when no records are in the database' do - let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:groups) } + let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:groups) } subject { described_class.new.perform(users_measurement_identifier, nil, nil, recorded_at) } it 'sets 0 as the count' do subject - measurement = Analytics::InstanceStatistics::Measurement.groups.first + measurement = Analytics::UsageTrends::Measurement.groups.first expect(measurement.recorded_at).to be_like_time(recorded_at) expect(measurement.identifier).to eq('groups') expect(measurement.count).to eq(0) @@ -49,19 +49,19 @@ RSpec.describe 
Analytics::InstanceStatistics::CounterJobWorker do it 'does not insert anything when BatchCount returns error' do allow(Gitlab::Database::BatchCount).to receive(:batch_count).and_return(Gitlab::Database::BatchCounter::FALLBACK) - expect { subject }.not_to change { Analytics::InstanceStatistics::Measurement.count } + expect { subject }.not_to change { Analytics::UsageTrends::Measurement.count } end context 'when pipelines_succeeded identifier is passed' do let_it_be(:pipeline) { create(:ci_pipeline, :success) } - let(:successful_pipelines_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:pipelines_succeeded) } + let(:successful_pipelines_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:pipelines_succeeded) } let(:job_args) { [successful_pipelines_measurement_identifier, pipeline.id, pipeline.id, recorded_at] } it 'counts successful pipelines' do subject - measurement = Analytics::InstanceStatistics::Measurement.pipelines_succeeded.first + measurement = Analytics::UsageTrends::Measurement.pipelines_succeeded.first expect(measurement.recorded_at).to be_like_time(recorded_at) expect(measurement.identifier).to eq('pipelines_succeeded') expect(measurement.count).to eq(1) diff --git a/spec/workers/analytics/usage_trends/count_job_trigger_worker_spec.rb b/spec/workers/analytics/usage_trends/count_job_trigger_worker_spec.rb new file mode 100644 index 00000000000..735e4a214a9 --- /dev/null +++ b/spec/workers/analytics/usage_trends/count_job_trigger_worker_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Analytics::UsageTrends::CountJobTriggerWorker do + it_behaves_like 'an idempotent worker' + + context 'triggers a job for each measurement identifiers' do + let(:expected_count) { Analytics::UsageTrends::Measurement.identifier_query_mapping.keys.size } + + it 'triggers CounterJobWorker jobs' do + subject.perform + + 
expect(Analytics::UsageTrends::CounterJobWorker.jobs.count).to eq(expected_count) + end + end +end diff --git a/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb b/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb new file mode 100644 index 00000000000..9e4c82ee981 --- /dev/null +++ b/spec/workers/analytics/usage_trends/counter_job_worker_spec.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Analytics::UsageTrends::CounterJobWorker do + let_it_be(:user_1) { create(:user) } + let_it_be(:user_2) { create(:user) } + + let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:users) } + let(:recorded_at) { Time.zone.now } + let(:job_args) { [users_measurement_identifier, user_1.id, user_2.id, recorded_at] } + + before do + allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) + end + + include_examples 'an idempotent worker' do + it 'counts a scope and stores the result' do + subject + + measurement = Analytics::UsageTrends::Measurement.users.first + expect(measurement.recorded_at).to be_like_time(recorded_at) + expect(measurement.identifier).to eq('users') + expect(measurement.count).to eq(2) + end + end + + context 'when no records are in the database' do + let(:users_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:groups) } + + subject { described_class.new.perform(users_measurement_identifier, nil, nil, recorded_at) } + + it 'sets 0 as the count' do + subject + + measurement = Analytics::UsageTrends::Measurement.groups.first + expect(measurement.recorded_at).to be_like_time(recorded_at) + expect(measurement.identifier).to eq('groups') + expect(measurement.count).to eq(0) + end + end + + it 'does not raise error when inserting duplicated measurement' do + subject + + expect { subject }.not_to raise_error + end + + it 'does not insert anything when BatchCount returns error' do + 
allow(Gitlab::Database::BatchCount).to receive(:batch_count).and_return(Gitlab::Database::BatchCounter::FALLBACK) + + expect { subject }.not_to change { Analytics::UsageTrends::Measurement.count } + end + + context 'when pipelines_succeeded identifier is passed' do + let_it_be(:pipeline) { create(:ci_pipeline, :success) } + + let(:successful_pipelines_measurement_identifier) { ::Analytics::UsageTrends::Measurement.identifiers.fetch(:pipelines_succeeded) } + let(:job_args) { [successful_pipelines_measurement_identifier, pipeline.id, pipeline.id, recorded_at] } + + it 'counts successful pipelines' do + subject + + measurement = Analytics::UsageTrends::Measurement.pipelines_succeeded.first + expect(measurement.recorded_at).to be_like_time(recorded_at) + expect(measurement.identifier).to eq('pipelines_succeeded') + expect(measurement.count).to eq(1) + end + end +end diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb index f7fa565d534..04cc3bec3af 100644 --- a/spec/workers/jira_connect/sync_project_worker_spec.rb +++ b/spec/workers/jira_connect/sync_project_worker_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do describe '#perform' do - let_it_be(:project) { create_default(:project) } + let_it_be(:project) { create_default(:project).freeze } let!(:mr_with_jira_title) { create(:merge_request, :unique_branches, title: 'TEST-123') } let!(:mr_with_jira_description) { create(:merge_request, :unique_branches, description: 'TEST-323') } let!(:mr_with_other_title) { create(:merge_request, :unique_branches) } diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb index aaae0988602..be501318920 100644 --- a/spec/workers/post_receive_spec.rb +++ b/spec/workers/post_receive_spec.rb @@ -93,6 +93,29 @@ RSpec.describe PostReceive do perform end + + it 'tracks an event for the empty_repo_upload experiment', :snowplow 
do + allow_next_instance_of(ApplicationExperiment) do |e| + allow(e).to receive(:should_track?).and_return(true) + allow(e).to receive(:track_initial_writes) + end + + perform + + expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0', data: anything }]) + end + + it 'does not track an event for the empty_repo_upload experiment when project is not empty', :snowplow do + allow(empty_project).to receive(:empty_repo?).and_return(false) + allow_next_instance_of(ApplicationExperiment) do |e| + allow(e).to receive(:should_track?).and_return(true) + allow(e).to receive(:track_initial_writes) + end + + perform + + expect_no_snowplow_event + end end shared_examples 'not updating remote mirrors' do @@ -159,7 +182,7 @@ RSpec.describe PostReceive do end it 'expires the status cache' do - expect(project.repository).to receive(:empty?).and_return(true) + expect(project.repository).to receive(:empty?).at_least(:once).and_return(true) expect(project.repository).to receive(:expire_status_cache) perform |