Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/admin/runners_controller_spec.rb | 5
-rw-r--r--  spec/controllers/boards/issues_controller_spec.rb | 195
-rw-r--r--  spec/controllers/projects/deployments_controller_spec.rb | 74
-rw-r--r--  spec/controllers/projects/environments/prometheus_api_controller_spec.rb | 2
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb | 79
-rw-r--r--  spec/factories/services.rb | 12
-rw-r--r--  spec/features/admin/admin_sees_project_statistics_spec.rb | 2
-rw-r--r--  spec/features/admin/admin_settings_spec.rb | 35
-rw-r--r--  spec/features/dashboard/milestones_spec.rb | 14
-rw-r--r--  spec/features/merge_request/user_creates_merge_request_spec.rb | 2
-rw-r--r--  spec/features/projects/files/user_edits_files_spec.rb | 1
-rw-r--r--  spec/features/projects/fork_spec.rb | 2
-rw-r--r--  spec/features/search/user_uses_header_search_field_spec.rb | 124
-rw-r--r--  spec/fixtures/api/schemas/current-board.json | 16
-rw-r--r--  spec/fixtures/phabricator_responses/user.search.json | 62
-rw-r--r--  spec/frontend/api_spec.js | 16
-rw-r--r--  spec/frontend/commons/nav/user_merge_requests_spec.js | 113
-rw-r--r--  spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap | 12
-rw-r--r--  spec/frontend/error_tracking_settings/mock.js | 2
-rw-r--r--  spec/frontend/filterable_list_spec.js | 53
-rw-r--r--  spec/frontend/ide/lib/files_spec.js | 4
-rw-r--r--  spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js | 51
-rw-r--r--  spec/frontend/projects/projects_filterable_list_spec.js | 31
-rw-r--r--  spec/graphql/gitlab_schema_spec.rb | 20
-rw-r--r--  spec/graphql/types/diff_refs_type_spec.rb | 9
-rw-r--r--  spec/graphql/types/merge_request_type_spec.rb | 2
-rw-r--r--  spec/graphql/types/notes/diff_position_type_spec.rb | 2
-rw-r--r--  spec/graphql/types/notes/discussion_type_spec.rb | 2
-rw-r--r--  spec/helpers/avatars_helper_spec.rb | 2
-rw-r--r--  spec/helpers/blob_helper_spec.rb | 38
-rw-r--r--  spec/helpers/boards_helper_spec.rb | 16
-rw-r--r--  spec/helpers/emails_helper_spec.rb | 2
-rw-r--r--  spec/helpers/markup_helper_spec.rb | 73
-rw-r--r--  spec/helpers/sorting_helper_spec.rb | 148
-rw-r--r--  spec/helpers/storage_helper_spec.rb | 2
-rw-r--r--  spec/javascripts/diffs/components/inline_diff_view_spec.js | 3
-rw-r--r--  spec/javascripts/helpers/vuex_action_helper.js | 4
-rw-r--r--  spec/javascripts/ide/components/repo_editor_spec.js | 40
-rw-r--r--  spec/javascripts/ide/stores/mutations/file_spec.js | 20
-rw-r--r--  spec/javascripts/issuable_spec.js | 12
-rw-r--r--  spec/javascripts/monitoring/charts/single_stat_spec.js | 9
-rw-r--r--  spec/javascripts/monitoring/mock_data.js | 72
-rw-r--r--  spec/javascripts/monitoring/store/mutations_spec.js | 2
-rw-r--r--  spec/javascripts/monitoring/utils_spec.js | 27
-rw-r--r--  spec/javascripts/notes/components/comment_form_spec.js | 15
-rw-r--r--  spec/javascripts/notes/stores/actions_spec.js | 28
-rw-r--r--  spec/javascripts/registry/components/collapsible_container_spec.js | 2
-rw-r--r--  spec/javascripts/registry/components/table_registry_spec.js | 2
-rw-r--r--  spec/javascripts/test_bundle.js | 3
-rw-r--r--  spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js | 3
-rw-r--r--  spec/lib/api/helpers/pagination_spec.rb | 10
-rw-r--r--  spec/lib/banzai/filter/inline_metrics_filter_spec.rb | 55
-rw-r--r--  spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb | 58
-rw-r--r--  spec/lib/banzai/renderer_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/asciidoc_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb | 104
-rw-r--r--  spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb | 98
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/batch_pop_queueing_spec.rb | 147
-rw-r--r--  spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/current_settings_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/cycle_analytics/events_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/cycle_analytics/test_stage_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/cycle_analytics/usage_data_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb (renamed from spec/db/importers/common_metrics_importer_spec.rb) | 21
-rw-r--r--  spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/email/handler/create_issue_handler_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/global_id_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/kubernetes/kube_client_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/kubernetes/role_binding_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/kubernetes/role_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/lets_encrypt_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/url_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/phabricator_import/cache/map_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/phabricator_import/conduit/user_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/phabricator_import/issues/importer_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/phabricator_import/representation/task_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/phabricator_import/representation/user_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/phabricator_import/user_finder_spec.rb | 89
-rw-r--r--  spec/lib/gitlab/sql/pattern_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/url_blocker_spec.rb | 4
-rw-r--r--  spec/lib/peek/views/redis_detailed_spec.rb | 36
-rw-r--r--  spec/migrations/active_record/schema_spec.rb | 2
-rw-r--r--  spec/models/ci/build_spec.rb | 1
-rw-r--r--  spec/models/clusters/clusters_hierarchy_spec.rb | 73
-rw-r--r--  spec/models/commit_range_spec.rb | 12
-rw-r--r--  spec/models/concerns/cache_markdown_field_spec.rb | 30
-rw-r--r--  spec/models/concerns/deployment_platform_spec.rb | 18
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 10
-rw-r--r--  spec/models/concerns/project_api_compatibility_spec.rb | 38
-rw-r--r--  spec/models/concerns/project_features_compatibility_spec.rb | 18
-rw-r--r--  spec/models/concerns/reactive_caching_spec.rb | 61
-rw-r--r--  spec/models/concerns/routable_spec.rb | 47
-rw-r--r--  spec/models/cycle_analytics/code_spec.rb | 3
-rw-r--r--  spec/models/cycle_analytics/issue_spec.rb | 5
-rw-r--r--  spec/models/cycle_analytics/plan_spec.rb | 5
-rw-r--r--  spec/models/cycle_analytics/production_spec.rb | 3
-rw-r--r--  spec/models/cycle_analytics/project_level_spec.rb (renamed from spec/models/cycle_analytics_spec.rb) | 8
-rw-r--r--  spec/models/cycle_analytics/review_spec.rb | 3
-rw-r--r--  spec/models/cycle_analytics/staging_spec.rb | 2
-rw-r--r--  spec/models/cycle_analytics/test_spec.rb | 3
-rw-r--r--  spec/models/deployment_metrics_spec.rb | 126
-rw-r--r--  spec/models/deployment_spec.rb | 147
-rw-r--r--  spec/models/discussion_spec.rb | 14
-rw-r--r--  spec/models/environment_status_spec.rb | 3
-rw-r--r--  spec/models/issue/metrics_spec.rb | 6
-rw-r--r--  spec/models/merge_request_spec.rb | 30
-rw-r--r--  spec/models/note_spec.rb | 2
-rw-r--r--  spec/models/pages_domain_spec.rb | 29
-rw-r--r--  spec/models/project_services/bugzilla_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/custom_issue_tracker_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/drone_ci_service_spec.rb | 9
-rw-r--r--  spec/models/project_services/gitlab_issue_tracker_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/jira_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/redmine_service_spec.rb | 6
-rw-r--r--  spec/models/project_services/youtrack_service_spec.rb | 6
-rw-r--r--  spec/models/project_statistics_spec.rb | 43
-rw-r--r--  spec/models/repository_spec.rb | 7
-rw-r--r--  spec/models/user_spec.rb | 20
-rw-r--r--  spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb | 64
-rw-r--r--  spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb | 70
-rw-r--r--  spec/requests/api/graphql/mutations/notes/create/note_spec.rb | 64
-rw-r--r--  spec/requests/api/graphql/mutations/notes/destroy_spec.rb | 52
-rw-r--r--  spec/requests/api/graphql/mutations/notes/update_spec.rb | 72
-rw-r--r--  spec/requests/api/graphql_spec.rb | 10
-rw-r--r--  spec/requests/api/group_clusters_spec.rb | 452
-rw-r--r--  spec/requests/api/internal_spec.rb | 12
-rw-r--r--  spec/requests/api/project_clusters_spec.rb | 16
-rw-r--r--  spec/requests/api/projects_spec.rb | 54
-rw-r--r--  spec/requests/api/user_counts_spec.rb | 40
-rw-r--r--  spec/routing/api_routing_spec.rb | 23
-rw-r--r--  spec/routing/environments_spec.rb | 9
-rw-r--r--  spec/routing/project_routing_spec.rb | 6
-rw-r--r--  spec/rubocop/cop/gitlab/rails_logger_spec.rb | 42
-rw-r--r--  spec/rubocop/cop/qa/element_with_pattern_spec.rb | 4
-rw-r--r--  spec/serializers/environment_status_entity_spec.rb | 12
-rw-r--r--  spec/services/audit_event_service_spec.rb | 32
-rw-r--r--  spec/services/boards/issues/move_service_spec.rb | 86
-rw-r--r--  spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb | 2
-rw-r--r--  spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb | 20
-rw-r--r--  spec/services/issuable/bulk_update_service_spec.rb | 496
-rw-r--r--  spec/services/issuable/clone/content_rewriter_spec.rb | 13
-rw-r--r--  spec/services/issues/update_service_spec.rb | 16
-rw-r--r--  spec/services/merge_requests/get_urls_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb | 13
-rw-r--r--  spec/services/merge_requests/merge_to_ref_service_spec.rb | 22
-rw-r--r--  spec/services/merge_requests/mergeability_check_service_spec.rb | 8
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 2
-rw-r--r--  spec/services/notification_service_spec.rb | 56
-rw-r--r--  spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb | 6
-rw-r--r--  spec/spec_helper.rb | 13
-rw-r--r--  spec/support/database_cleaner.rb | 27
-rw-r--r--  spec/support/db_cleaner.rb | 4
-rw-r--r--  spec/support/features/rss_shared_examples.rb | 4
-rw-r--r--  spec/support/helpers/git_http_helpers.rb | 4
-rw-r--r--  spec/support/helpers/graphql_helpers.rb | 15
-rw-r--r--  spec/support/helpers/kubernetes_helpers.rb | 5
-rw-r--r--  spec/support/helpers/memory_usage_helper.rb | 37
-rw-r--r--  spec/support/helpers/migrations_helpers.rb | 13
-rw-r--r--  spec/support/helpers/reactive_caching_helpers.rb | 2
-rw-r--r--  spec/support/helpers/test_env.rb | 22
-rw-r--r--  spec/support/matchers/abort_matcher.rb | 46
-rw-r--r--  spec/support/shared_contexts/email_shared_context.rb | 2
-rw-r--r--  spec/support/shared_examples/graphql/notes_creation_shared_examples.rb | 61
-rw-r--r--  spec/support/shared_examples/mentionable_shared_examples.rb | 51
-rw-r--r--  spec/support/shared_examples/policies/clusterable_shared_examples.rb | 8
-rw-r--r--  spec/tasks/gitlab/storage_rake_spec.rb | 34
-rw-r--r--  spec/views/notify/pipeline_failed_email.html.haml_spec.rb | 4
-rw-r--r--  spec/views/notify/pipeline_failed_email.text.erb_spec.rb | 2
-rw-r--r--  spec/views/notify/pipeline_success_email.html.haml_spec.rb | 4
-rw-r--r--  spec/workers/project_cache_worker_spec.rb | 14
177 files changed, 4837 insertions, 932 deletions
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 78c5e2a2656..bbeda7dae0f 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -23,10 +23,11 @@ describe Admin::RunnersController do
control_count = ActiveRecord::QueryRecorder.new { get :index }.count
- create(:ci_runner, :tagged_only)
+ create_list(:ci_runner, 5, :tagged_only)
# There is still an N+1 query for `runner.builds.count`
- expect { get :index }.not_to exceed_query_limit(control_count + 1)
+ # We also need to add 1 because it takes 2 queries to preload tags
+ expect { get :index }.not_to exceed_query_limit(control_count + 6)
expect(response).to have_gitlab_http_status(200)
expect(response.body).to have_content('tag1')
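
For readers unfamiliar with the helpers in the hunk above: `ActiveRecord::QueryRecorder` and the `exceed_query_limit` matcher come from GitLab's spec support and are used together to guard against N+1 queries. A minimal sketch of the pattern, assuming the runner factory traits shown above; it is illustrative only and not part of this commit:

# Sketch of the N+1 guard used above (illustrative, not part of the diff).
it 'does not add a query per extra runner' do
  create(:ci_runner, :tagged_only)
  # Record how many SQL queries one GET /admin/runners issues as a baseline.
  control_count = ActiveRecord::QueryRecorder.new { get :index }.count

  create_list(:ci_runner, 5, :tagged_only)

  # Allow a small, documented leeway (runner.builds.count plus the extra
  # query needed to preload tags) rather than requiring an exact match.
  expect { get :index }.not_to exceed_query_limit(control_count + 6)
end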
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index c84bb913cad..6cad060d888 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -164,6 +164,201 @@ describe Boards::IssuesController do
end
end
+ describe 'PUT move_multiple' do
+ let(:todo) { create(:group_label, group: group, name: 'Todo') }
+ let(:development) { create(:group_label, group: group, name: 'Development') }
+ let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
+ let(:guest) { create(:group_member, :guest, user: create(:user), group: group ).user }
+ let(:project) { create(:project, group: group) }
+ let(:group) { create(:group) }
+ let(:board) { create(:board, project: project) }
+ let(:list1) { create(:list, board: board, label: todo, position: 0) }
+ let(:list2) { create(:list, board: board, label: development, position: 1) }
+ let(:issue1) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 10) }
+ let(:issue2) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 20) }
+ let(:issue3) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 30) }
+ let(:issue4) { create(:labeled_issue, project: project, labels: [development], author: user, relative_position: 100) }
+
+ let(:move_params) do
+ {
+ board_id: board.id,
+ ids: [issue1.id, issue2.id, issue3.id],
+ from_list_id: list1.id,
+ to_list_id: list2.id,
+ move_before_id: issue4.id,
+ move_after_id: nil
+ }
+ end
+
+ before do
+ project.add_maintainer(user)
+ project.add_guest(guest)
+ end
+
+ shared_examples 'move issues endpoint provider' do
+ before do
+ sign_in(signed_in_user)
+ end
+
+ it 'moves issues as expected' do
+ put :bulk_move, params: move_issues_params
+ expect(response).to have_gitlab_http_status(expected_status)
+
+ list_issues user: requesting_user, board: board, list: list2
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(response).to match_response_schema('entities/issue_boards')
+
+ responded_issues = json_response['issues']
+ expect(responded_issues.length).to eq expected_issue_count
+
+ ids_in_order = responded_issues.pluck('id')
+ expect(ids_in_order).to eq(expected_issue_ids_in_order)
+ end
+ end
+
+ context 'when items are moved to another list' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) { move_params }
+ let(:requesting_user) { user }
+ let(:expected_status) { 200 }
+ let(:expected_issue_count) { 4 }
+ let(:expected_issue_ids_in_order) { [issue4.id, issue1.id, issue2.id, issue3.id] }
+ end
+ end
+
+ context 'when moving just one issue' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:ids] = [issue2.id]
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 200 }
+ let(:expected_issue_count) { 2 }
+ let(:expected_issue_ids_in_order) { [issue4.id, issue2.id] }
+ end
+ end
+
+ context 'when user is not allowed to move issue' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { guest }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:ids] = [issue2.id]
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 403 }
+ let(:expected_issue_count) { 1 }
+ let(:expected_issue_ids_in_order) { [issue4.id] }
+ end
+ end
+
+ context 'when issues should be moved visually above existing issue in list' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:move_after_id] = issue4.id
+ hash[:move_before_id] = nil
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 200 }
+ let(:expected_issue_count) { 4 }
+ let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id, issue4.id] }
+ end
+ end
+
+ context 'when destination list is empty' do
+ before do
+ # Remove issue from list
+ issue4.labels -= [development]
+ issue4.save!
+ end
+
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:move_before_id] = nil
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 200 }
+ let(:expected_issue_count) { 3 }
+ let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id] }
+ end
+ end
+
+ context 'when no position arguments are given' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:move_before_id] = nil
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 200 }
+ let(:expected_issue_count) { 4 }
+ let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id, issue4.id] }
+ end
+ end
+
+ context 'when move_before_id and move_after_id are given' do
+ let(:issue5) { create(:labeled_issue, project: project, labels: [development], author: user, relative_position: 90) }
+
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:move_before_id] = issue5.id
+ hash[:move_after_id] = issue4.id
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 200 }
+ let(:expected_issue_count) { 5 }
+ let(:expected_issue_ids_in_order) { [issue5.id, issue1.id, issue2.id, issue3.id, issue4.id] }
+ end
+ end
+
+ context 'when request contains too many issues' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:ids] = (0..51).to_a
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 422 }
+ let(:expected_issue_count) { 1 }
+ let(:expected_issue_ids_in_order) { [issue4.id] }
+ end
+ end
+
+ context 'when request is malformed' do
+ it_behaves_like 'move issues endpoint provider' do
+ let(:signed_in_user) { user }
+ let(:move_issues_params) do
+ move_params.dup.tap do |hash|
+ hash[:ids] = 'foobar'
+ end
+ end
+ let(:requesting_user) { user }
+ let(:expected_status) { 400 }
+ let(:expected_issue_count) { 1 }
+ let(:expected_issue_ids_in_order) { [issue4.id] }
+ end
+ end
+ end
+
def list_issues(user:, board:, list: nil)
sign_in(user)
diff --git a/spec/controllers/projects/deployments_controller_spec.rb b/spec/controllers/projects/deployments_controller_spec.rb
index 95417936df4..b9ee69a617b 100644
--- a/spec/controllers/projects/deployments_controller_spec.rb
+++ b/spec/controllers/projects/deployments_controller_spec.rb
@@ -41,34 +41,26 @@ describe Projects::DeploymentsController do
describe 'GET #metrics' do
let(:deployment) { create(:deployment, :success, project: project, environment: environment) }
- before do
- allow(controller).to receive(:deployment).and_return(deployment)
- end
-
context 'when metrics are disabled' do
- before do
- allow(deployment).to receive(:has_metrics?).and_return false
- end
-
it 'responds with not found' do
- get :metrics, params: deployment_params(id: deployment.id)
+ get :metrics, params: deployment_params(id: deployment.to_param)
expect(response).to be_not_found
end
end
context 'when metrics are enabled' do
- before do
- allow(deployment).to receive(:has_metrics?).and_return true
- end
-
context 'when environment has no metrics' do
before do
- expect(deployment).to receive(:metrics).and_return(nil)
+ expect_next_instance_of(DeploymentMetrics) do |deployment_metrics|
+ allow(deployment_metrics).to receive(:has_metrics?).and_return(true)
+
+ expect(deployment_metrics).to receive(:metrics).and_return(nil)
+ end
end
it 'returns an empty 204 response' do
- get :metrics, params: deployment_params(id: deployment.id)
+ get :metrics, params: deployment_params(id: deployment.to_param)
expect(response).to have_gitlab_http_status(204)
expect(response.body).to eq('')
end
@@ -84,11 +76,15 @@ describe Projects::DeploymentsController do
end
before do
- expect(deployment).to receive(:metrics).and_return(empty_metrics)
+ expect_next_instance_of(DeploymentMetrics) do |deployment_metrics|
+ allow(deployment_metrics).to receive(:has_metrics?).and_return(true)
+
+ expect(deployment_metrics).to receive(:metrics).and_return(empty_metrics)
+ end
end
it 'returns a metrics JSON document' do
- get :metrics, params: deployment_params(id: deployment.id)
+ get :metrics, params: deployment_params(id: deployment.to_param)
expect(response).to be_ok
expect(json_response['success']).to be(true)
@@ -96,54 +92,32 @@ describe Projects::DeploymentsController do
expect(json_response['last_update']).to eq(42)
end
end
-
- context 'when metrics service does not implement deployment metrics' do
- before do
- allow(deployment).to receive(:metrics).and_raise(NotImplementedError)
- end
-
- it 'responds with not found' do
- get :metrics, params: deployment_params(id: deployment.id)
-
- expect(response).to be_not_found
- end
- end
end
end
describe 'GET #additional_metrics' do
let(:deployment) { create(:deployment, :success, project: project, environment: environment) }
- before do
- allow(controller).to receive(:deployment).and_return(deployment)
- end
-
context 'when metrics are disabled' do
- before do
- allow(deployment).to receive(:has_metrics?).and_return false
- end
-
it 'responds with not found' do
- get :metrics, params: deployment_params(id: deployment.id)
+ get :metrics, params: deployment_params(id: deployment.to_param)
expect(response).to be_not_found
end
end
context 'when metrics are enabled' do
- let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
-
- before do
- allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- end
-
context 'when environment has no metrics' do
before do
- expect(deployment).to receive(:additional_metrics).and_return({})
+ expect_next_instance_of(DeploymentMetrics) do |deployment_metrics|
+ allow(deployment_metrics).to receive(:has_metrics?).and_return(true)
+
+ expect(deployment_metrics).to receive(:additional_metrics).and_return({})
+ end
end
it 'returns an empty 204 response' do
- get :additional_metrics, params: deployment_params(id: deployment.id, format: :json)
+ get :additional_metrics, params: deployment_params(id: deployment.to_param, format: :json)
expect(response).to have_gitlab_http_status(204)
expect(response.body).to eq('')
end
@@ -159,11 +133,15 @@ describe Projects::DeploymentsController do
end
before do
- expect(deployment).to receive(:additional_metrics).and_return(empty_metrics)
+ expect_next_instance_of(DeploymentMetrics) do |deployment_metrics|
+ allow(deployment_metrics).to receive(:has_metrics?).and_return(true)
+
+ expect(deployment_metrics).to receive(:additional_metrics).and_return(empty_metrics)
+ end
end
it 'returns a metrics JSON document' do
- get :additional_metrics, params: deployment_params(id: deployment.id, format: :json)
+ get :additional_metrics, params: deployment_params(id: deployment.to_param, format: :json)
expect(response).to be_ok
expect(json_response['success']).to be(true)
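
The `expect_next_instance_of(DeploymentMetrics)` blocks above use a GitLab spec-support helper that yields the next instance a class creates, so the spec no longer needs to reach into the controller for the deployment object. A minimal sketch of the idea, with explanatory comments added; it is illustrative and not part of this commit:

# Sketch of the expect_next_instance_of pattern used above (illustrative).
before do
  expect_next_instance_of(DeploymentMetrics) do |deployment_metrics|
    # Pretend metrics are configured, then stub the query result itself,
    # so the controller's 204/JSON handling is exercised in isolation.
    allow(deployment_metrics).to receive(:has_metrics?).and_return(true)
    expect(deployment_metrics).to receive(:metrics).and_return(nil)
  end
end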
diff --git a/spec/controllers/projects/environments/prometheus_api_controller_spec.rb b/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
index fdef9bc5638..45328482ad7 100644
--- a/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
+++ b/spec/controllers/projects/environments/prometheus_api_controller_spec.rb
@@ -176,7 +176,7 @@ describe Projects::Environments::PrometheusApiController do
def environment_params(params = {})
{
id: environment.id.to_s,
- namespace_id: project.namespace.name,
+ namespace_id: project.namespace.full_path,
project_id: project.name,
proxy_path: 'query',
query: '1'
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 0eca663a683..cc6adc0a6c6 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -878,16 +878,79 @@ describe Projects::MergeRequestsController do
expect(control_count).to be <= 137
end
- def get_ci_environments_status(extra_params = {})
- params = {
- namespace_id: merge_request.project.namespace.to_param,
- project_id: merge_request.project,
- id: merge_request.iid,
- format: 'json'
- }
+ it 'has no N+1 SQL issues for environments', :request_store, retry: 0 do
+ # First run to insert test data from lets, which does take up some 30 queries
+ get_ci_environments_status
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get_ci_environments_status }.count
+
+ environment2 = create(:environment, project: forked)
+ create(:deployment, :succeed, environment: environment2, sha: sha, ref: 'master', deployable: build)
+
+ # TODO address the last 11 queries
+ # See https://gitlab.com/gitlab-org/gitlab-ce/issues/63952 (5 queries)
+ # And https://gitlab.com/gitlab-org/gitlab-ce/issues/64105 (6 queries)
+ leeway = 11
+ expect { get_ci_environments_status }.not_to exceed_all_query_limit(control_count + leeway)
+ end
+ end
+
+ context 'when a merge request has multiple environments with deployments' do
+ let(:sha) { merge_request.diff_head_sha }
+ let(:ref) { merge_request.source_branch }
+
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
+ let!(:environment) { create(:environment, name: 'env_a', project: project) }
+ let!(:another_environment) { create(:environment, name: 'env_b', project: project) }
+
+ before do
+ merge_request.update_head_pipeline
+
+ create(:deployment, :succeed, environment: environment, sha: sha, ref: ref, deployable: build)
+ create(:deployment, :succeed, environment: another_environment, sha: sha, ref: ref, deployable: build)
+ end
+
+ it 'exposes multiple environment statuses' do
+ get_ci_environments_status
+
+ expect(json_response.count).to eq 2
+ end
+
+ context 'when route map is not present in the project' do
+ it 'does not have N+1 Gitaly requests for environments', :request_store do
+ expect(merge_request).to be_present
- get :ci_environments_status, params: params.merge(extra_params)
+ expect { get_ci_environments_status }
+ .to change { Gitlab::GitalyClient.get_request_count }.by_at_most(1)
+ end
end
+
+ context 'when there is route map present in a project' do
+ before do
+ allow_any_instance_of(EnvironmentStatus)
+ .to receive(:has_route_map?)
+ .and_return(true)
+ end
+
+ it 'does not have N+1 Gitaly requests for diff files', :request_store do
+ expect(merge_request.merge_request_diff.merge_request_diff_files).to be_many
+
+ expect { get_ci_environments_status }
+ .to change { Gitlab::GitalyClient.get_request_count }.by_at_most(1)
+ end
+ end
+ end
+
+ def get_ci_environments_status(extra_params = {})
+ params = {
+ namespace_id: merge_request.project.namespace.to_param,
+ project_id: merge_request.project,
+ id: merge_request.iid,
+ format: 'json'
+ }
+
+ get :ci_environments_status, params: params.merge(extra_params)
end
end
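
As an aside, the Gitaly assertions above follow the same shape as the SQL ones: evaluate the request counter before and after the action and assert it barely moves. A hedged sketch using the `Gitlab::GitalyClient.get_request_count` counter referenced in the hunk; it is illustrative only and not part of this commit:

# Sketch of the Gitaly N+1 guard used above (illustrative only).
it 'does not issue a Gitaly request per environment', :request_store do
  # :request_store scopes the Gitaly request counter to one simulated request.
  expect { get_ci_environments_status }
    .to change { Gitlab::GitalyClient.get_request_count }.by_at_most(1)
end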
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index ecb481ed84a..cd1d2c33373 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -100,4 +100,16 @@ FactoryBot.define do
type 'HipchatService'
token 'test_token'
end
+
+ trait :without_properties_callback do
+ after(:build) do |service|
+ allow(service).to receive(:handle_properties)
+ end
+
+ after(:create) do |service|
+ # we have to remove the stub because the behaviour of
+ # handle_properties method is tested after the creation
+ allow(service).to receive(:handle_properties).and_call_original
+ end
+ end
end
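
A hedged example of how a spec might consume the `:without_properties_callback` trait added above. The base `:service` factory name is an assumption about this factories file; the example is illustrative and not part of this commit:

# Illustrative usage of the trait above (not part of the diff).
# handle_properties is stubbed while the record is built and created,
# then restored so later examples can exercise its real behaviour.
let(:service) { create(:service, :without_properties_callback) }

it 'is persisted without running the properties callback' do
  expect(service).to be_persisted
end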
diff --git a/spec/features/admin/admin_sees_project_statistics_spec.rb b/spec/features/admin/admin_sees_project_statistics_spec.rb
index b5323a1c76d..ecd0aab925b 100644
--- a/spec/features/admin/admin_sees_project_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_project_statistics_spec.rb
@@ -15,7 +15,7 @@ describe "Admin > Admin sees project statistics" do
let(:project) { create(:project, :repository) }
it "shows project statistics" do
- expect(page).to have_content("Storage: 0 Bytes (0 Bytes repositories, 0 Bytes wikis, 0 Bytes build artifacts, 0 Bytes LFS)")
+ expect(page).to have_content("Storage: 0 Bytes (Repository: 0 Bytes / Wikis: 0 Bytes / Build Artifacts: 0 Bytes / LFS: 0 Bytes)")
end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 4a9037afb43..518b3625348 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -400,35 +400,16 @@ describe 'Admin updates settings' do
.to have_content "The form contains the following error: Polling interval multiplier must be greater than or equal to 0"
end
- context 'When pages_auto_ssl is enabled' do
- before do
- stub_feature_flags(pages_auto_ssl: true)
- visit preferences_admin_application_settings_path
- end
-
- it "Change Pages Let's Encrypt settings" do
- page.within('.as-pages') do
- fill_in 'Email', with: 'my@test.example.com'
- check "I have read and agree to the Let's Encrypt Terms of Service"
- click_button 'Save changes'
- end
-
- expect(current_settings.lets_encrypt_notification_email).to eq 'my@test.example.com'
- expect(current_settings.lets_encrypt_terms_of_service_accepted).to eq true
- end
- end
-
- context 'When pages_auto_ssl is disabled' do
- before do
- stub_feature_flags(pages_auto_ssl: false)
- visit preferences_admin_application_settings_path
+ it "Change Pages Let's Encrypt settings" do
+ visit preferences_admin_application_settings_path
+ page.within('.as-pages') do
+ fill_in 'Email', with: 'my@test.example.com'
+ check "I have read and agree to the Let's Encrypt Terms of Service"
+ click_button 'Save changes'
end
- it "Doesn't show Let's Encrypt options" do
- page.within('.as-pages') do
- expect(page).not_to have_content('Email')
- end
- end
+ expect(current_settings.lets_encrypt_notification_email).to eq 'my@test.example.com'
+ expect(current_settings.lets_encrypt_terms_of_service_accepted).to eq true
end
end
diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb
index 8fb2e37e269..c3310a4a132 100644
--- a/spec/features/dashboard/milestones_spec.rb
+++ b/spec/features/dashboard/milestones_spec.rb
@@ -29,5 +29,19 @@ describe 'Dashboard > Milestones' do
expect(page).to have_content(milestone.title)
expect(page).to have_content(group.name)
end
+
+ describe 'new milestones dropdown', :js do
+ it 'takes user to a new milestone page', :js do
+ find('.new-project-item-select-button').click
+
+ page.within('.select2-results') do
+ first('.select2-result-label').click
+ end
+
+ find('.new-project-item-link').click
+
+ expect(current_path).to eq(new_group_milestone_path(group))
+ end
+ end
end
end
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index d05ef2a8f12..2a70cbb2a72 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -78,7 +78,7 @@ describe "User creates a merge request", :js do
click_button("Submit merge request")
- expect(page).to have_content(title).and have_content("Request to merge #{user.namespace.name}:#{source_branch} into master")
+ expect(page).to have_content(title).and have_content("Request to merge #{user.namespace.path}:#{source_branch} into master")
end
end
end
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index e0fa9dbb5fa..e4b02408b49 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -162,6 +162,7 @@ describe 'Projects > Files > User edits files', :js do
expect_fork_status
+ expect(page).to have_css('.ide-sidebar-project-title', text: "#{project2.name} #{user.namespace.full_path}/#{project2.path}")
expect(page).to have_css('.ide .multi-file-tab', text: '.gitignore')
end
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 7c71b4c52e0..26b94bf58b2 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -50,7 +50,7 @@ describe 'Project fork' do
click_link('New merge request')
end
- expect(current_path).to have_content(/#{user.namespace.name}/i)
+ expect(current_path).to have_content(/#{user.namespace.path}/i)
end
it 'shows avatars when Gravatar is disabled' do
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 1cc47cd6bd1..7ddd5c12cdf 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe 'User uses header search field' do
+describe 'User uses header search field', :js do
include FilteredSearchHelpers
let(:project) { create(:project) }
@@ -11,57 +11,12 @@ describe 'User uses header search field' do
sign_in(user)
end
- context 'when user is in a global scope', :js do
+ shared_examples 'search field examples' do
before do
- visit(root_path)
- page.find('#search').click
+ visit(url)
end
- context 'when clicking issues' do
- it 'shows assigned issues' do
- find('.search-input-container .dropdown-menu').click_link('Issues assigned to me')
-
- expect(page).to have_selector('.filtered-search')
- expect_tokens([assignee_token(user.name)])
- expect_filtered_search_input_empty
- end
-
- it 'shows created issues' do
- find('.search-input-container .dropdown-menu').click_link("Issues I've created")
-
- expect(page).to have_selector('.filtered-search')
- expect_tokens([author_token(user.name)])
- expect_filtered_search_input_empty
- end
- end
-
- context 'when clicking merge requests' do
- let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignees: [user]) }
-
- it 'shows assigned merge requests' do
- find('.search-input-container .dropdown-menu').click_link('Merge requests assigned to me')
-
- expect(page).to have_selector('.filtered-search')
- expect_tokens([assignee_token(user.name)])
- expect_filtered_search_input_empty
- end
-
- it 'shows created merge requests' do
- find('.search-input-container .dropdown-menu').click_link("Merge requests I've created")
-
- expect(page).to have_selector('.filtered-search')
- expect_tokens([author_token(user.name)])
- expect_filtered_search_input_empty
- end
- end
- end
-
- context 'when user is in a project scope' do
- before do
- visit(project_path(project))
- end
-
- it 'starts searching by pressing the enter key', :js do
+ it 'starts searching by pressing the enter key' do
fill_in('search', with: 'gitlab')
find('#search').native.send_keys(:enter)
@@ -70,30 +25,31 @@ describe 'User uses header search field' do
end
end
- context 'when clicking the search field', :js do
+ context 'when clicking the search field' do
before do
page.find('#search').click
+ wait_for_all_requests
end
it 'shows category search dropdown' do
- expect(page).to have_selector('.dropdown-header', text: /#{project.name}/i)
+ expect(page).to have_selector('.dropdown-header', text: /#{scope_name}/i)
end
context 'when clicking issues' do
let!(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
it 'shows assigned issues' do
- find('.dropdown-menu').click_link('Issues assigned to me')
+ find('.search-input-container .dropdown-menu').click_link('Issues assigned to me')
- expect(page).to have_selector('.filtered-search')
+ expect(page).to have_selector('.issues-list .issue')
expect_tokens([assignee_token(user.name)])
expect_filtered_search_input_empty
end
it 'shows created issues' do
- find('.dropdown-menu').click_link("Issues I've created")
+ find('.search-input-container .dropdown-menu').click_link("Issues I've created")
- expect(page).to have_selector('.filtered-search')
+ expect(page).to have_selector('.issues-list .issue')
expect_tokens([author_token(user.name)])
expect_filtered_search_input_empty
end
@@ -103,33 +59,77 @@ describe 'User uses header search field' do
let!(:merge_request) { create(:merge_request, source_project: project, author: user, assignees: [user]) }
it 'shows assigned merge requests' do
- find('.dropdown-menu').click_link('Merge requests assigned to me')
+ find('.search-input-container .dropdown-menu').click_link('Merge requests assigned to me')
- expect(page).to have_selector('.merge-requests-holder')
+ expect(page).to have_selector('.mr-list .merge-request')
expect_tokens([assignee_token(user.name)])
expect_filtered_search_input_empty
end
it 'shows created merge requests' do
- find('.dropdown-menu').click_link("Merge requests I've created")
+ find('.search-input-container .dropdown-menu').click_link("Merge requests I've created")
- expect(page).to have_selector('.merge-requests-holder')
+ expect(page).to have_selector('.mr-list .merge-request')
expect_tokens([author_token(user.name)])
expect_filtered_search_input_empty
end
end
end
- context 'when entering text into the search field', :js do
+ context 'when entering text into the search field' do
before do
page.within('.search-input-wrap') do
- fill_in('search', with: project.name[0..3])
+ fill_in('search', with: scope_name.first(4))
end
end
it 'does not display the category search dropdown' do
- expect(page).not_to have_selector('.dropdown-header', text: /#{project.name}/i)
+ expect(page).not_to have_selector('.dropdown-header', text: /#{scope_name}/i)
end
end
end
+
+ context 'when user is in a global scope' do
+ include_examples 'search field examples' do
+ let(:url) { root_path }
+ let(:scope_name) { 'All GitLab' }
+ end
+ end
+
+ context 'when user is in a project scope' do
+ include_examples 'search field examples' do
+ let(:url) { project_path(project) }
+ let(:scope_name) { project.name }
+ end
+ end
+
+ context 'when user is in a group scope' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ include_examples 'search field examples' do
+ let(:url) { group_path(group) }
+ let(:scope_name) { group.name }
+ end
+ end
+
+ context 'when user is in a subgroup scope' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, :public, parent: group) }
+ let(:project) { create(:project, namespace: subgroup) }
+
+ before do
+ group.add_owner(user)
+ subgroup.add_owner(user)
+ end
+
+ include_examples 'search field examples' do
+ let(:url) { group_path(subgroup) }
+ let(:scope_name) { subgroup.name }
+ end
+ end
end
diff --git a/spec/fixtures/api/schemas/current-board.json b/spec/fixtures/api/schemas/current-board.json
new file mode 100644
index 00000000000..2ddc038e908
--- /dev/null
+++ b/spec/fixtures/api/schemas/current-board.json
@@ -0,0 +1,16 @@
+{
+ "type": "object",
+ "allOf": [
+ { "$ref": "board.json" },
+ {
+ "required" : [
+ "id",
+ "name"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "name": { "type": "string" }
+ }
+ }
+ ]
+}
diff --git a/spec/fixtures/phabricator_responses/user.search.json b/spec/fixtures/phabricator_responses/user.search.json
new file mode 100644
index 00000000000..f3ec653a23e
--- /dev/null
+++ b/spec/fixtures/phabricator_responses/user.search.json
@@ -0,0 +1,62 @@
+{
+ "result": {
+ "data": [
+ {
+ "id": 1,
+ "type": "USER",
+ "phid": "PHID-USER-hohoho",
+ "fields": {
+ "username": "jane",
+ "realName": "Jane Doe",
+ "roles": [
+ "admin",
+ "verified",
+ "approved",
+ "activated"
+ ],
+ "dateCreated": 1405970599,
+ "dateModified": 1406705963,
+ "policy": {
+ "view": "public",
+ "edit": "no-one"
+ }
+ },
+ "attachments": {}
+ },
+ {
+ "id": 2,
+ "type": "USER",
+ "phid": "PHID-USER-hihihi",
+ "fields": {
+ "username": "john",
+ "realName": "John Doe",
+ "roles": [
+ "admin",
+ "verified",
+ "approved",
+ "activated"
+ ],
+ "dateCreated": 1403609184,
+ "dateModified": 1559138722,
+ "policy": {
+ "view": "public",
+ "edit": "no-one"
+ }
+ },
+ "attachments": {}
+ }
+ ],
+ "maps": {},
+ "query": {
+ "queryKey": null
+ },
+ "cursor": {
+ "limit": "100",
+ "after": null,
+ "before": null,
+ "order": null
+ }
+ },
+ "error_code": null,
+ "error_info": null
+}
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 0188d12a57d..7004373be0e 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -412,6 +412,22 @@ describe('Api', () => {
});
});
+ describe('user counts', () => {
+ it('fetches single user counts', done => {
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/user_counts`;
+ mock.onGet(expectedUrl).reply(200, {
+ merge_requests: 4,
+ });
+
+ Api.userCounts()
+ .then(({ data }) => {
+ expect(data.merge_requests).toBe(4);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('user status', () => {
it('fetches single user status', done => {
const userId = '123456';
diff --git a/spec/frontend/commons/nav/user_merge_requests_spec.js b/spec/frontend/commons/nav/user_merge_requests_spec.js
new file mode 100644
index 00000000000..4da6d53557a
--- /dev/null
+++ b/spec/frontend/commons/nav/user_merge_requests_spec.js
@@ -0,0 +1,113 @@
+import {
+ openUserCountsBroadcast,
+ closeUserCountsBroadcast,
+ refreshUserMergeRequestCounts,
+} from '~/commons/nav/user_merge_requests';
+import Api from '~/api';
+
+jest.mock('~/api');
+
+const TEST_COUNT = 1000;
+const MR_COUNT_CLASS = 'merge-requests-count';
+
+describe('User Merge Requests', () => {
+ let channelMock;
+ let newBroadcastChannelMock;
+
+ beforeEach(() => {
+ global.gon.current_user_id = 123;
+
+ channelMock = {
+ postMessage: jest.fn(),
+ close: jest.fn(),
+ };
+ newBroadcastChannelMock = jest.fn().mockImplementation(() => channelMock);
+
+ global.BroadcastChannel = newBroadcastChannelMock;
+ setFixtures(`<div class="${MR_COUNT_CLASS}">0</div>`);
+ });
+
+ const findMRCountText = () => document.body.querySelector(`.${MR_COUNT_CLASS}`).textContent;
+
+ describe('refreshUserMergeRequestCounts', () => {
+ beforeEach(() => {
+ Api.userCounts.mockReturnValue(
+ Promise.resolve({
+ data: { merge_requests: TEST_COUNT },
+ }),
+ );
+ });
+
+ describe('with open broadcast channel', () => {
+ beforeEach(() => {
+ openUserCountsBroadcast();
+
+ return refreshUserMergeRequestCounts();
+ });
+
+ it('updates the top count of merge requests', () => {
+ expect(findMRCountText()).toEqual(TEST_COUNT.toLocaleString());
+ });
+
+ it('calls the API', () => {
+ expect(Api.userCounts).toHaveBeenCalled();
+ });
+
+ it('posts count to BroadcastChannel', () => {
+ expect(channelMock.postMessage).toHaveBeenCalledWith(TEST_COUNT);
+ });
+ });
+
+ describe('without open broadcast channel', () => {
+ beforeEach(() => refreshUserMergeRequestCounts());
+
+ it('does not post anything', () => {
+ expect(channelMock.postMessage).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('openUserCountsBroadcast', () => {
+ beforeEach(() => {
+ openUserCountsBroadcast();
+ });
+
+ it('creates BroadcastChannel that updates DOM on message received', () => {
+ expect(findMRCountText()).toEqual('0');
+
+ channelMock.onmessage({ data: TEST_COUNT });
+
+ expect(findMRCountText()).toEqual(TEST_COUNT.toLocaleString());
+ });
+
+ it('closes if called while already open', () => {
+ expect(channelMock.close).not.toHaveBeenCalled();
+
+ openUserCountsBroadcast();
+
+ expect(channelMock.close).toHaveBeenCalled();
+ });
+ });
+
+ describe('closeUserCountsBroadcast', () => {
+ describe('when not opened', () => {
+ it('does nothing', () => {
+ expect(channelMock.close).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when opened', () => {
+ beforeEach(() => {
+ openUserCountsBroadcast();
+ });
+
+ it('closes', () => {
+ expect(channelMock.close).not.toHaveBeenCalled();
+
+ closeUserCountsBroadcast();
+
+ expect(channelMock.close).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
index a241c764df7..47bdc677068 100644
--- a/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
+++ b/spec/frontend/confidential_merge_request/components/__snapshots__/project_form_group_spec.js.snap
@@ -2,7 +2,7 @@
exports[`Confidential merge request project form group component renders empty state when response is empty 1`] = `
<div
- class="form-group"
+ class="confidential-merge-request-fork-group form-group"
>
<label>
Project
@@ -19,7 +19,7 @@ exports[`Confidential merge request project form group component renders empty s
<br />
<span>
- To protect this issues confidentiality,
+ To protect this issue's confidentiality,
<a
class="help-link"
href="https://test.com"
@@ -30,7 +30,7 @@ exports[`Confidential merge request project form group component renders empty s
</span>
<gllink-stub
- class="help-link"
+ class="w-auto p-0 d-inline-block text-primary bg-transparent"
href="/help"
target="_blank"
>
@@ -52,7 +52,7 @@ exports[`Confidential merge request project form group component renders empty s
exports[`Confidential merge request project form group component renders fork dropdown 1`] = `
<div
- class="form-group"
+ class="confidential-merge-request-fork-group form-group"
>
<label>
Project
@@ -69,7 +69,7 @@ exports[`Confidential merge request project form group component renders fork dr
<br />
<span>
- To protect this issues confidentiality,
+ To protect this issue's confidentiality,
<a
class="help-link"
href="https://test.com"
@@ -80,7 +80,7 @@ exports[`Confidential merge request project form group component renders fork dr
</span>
<gllink-stub
- class="help-link"
+ class="w-auto p-0 d-inline-block text-primary bg-transparent"
href="/help"
target="_blank"
>
diff --git a/spec/frontend/error_tracking_settings/mock.js b/spec/frontend/error_tracking_settings/mock.js
index 42233f82d54..8c5bfd08beb 100644
--- a/spec/frontend/error_tracking_settings/mock.js
+++ b/spec/frontend/error_tracking_settings/mock.js
@@ -1,5 +1,5 @@
import createStore from '~/error_tracking_settings/store';
-import { TEST_HOST } from '../helpers/test_constants';
+import { TEST_HOST } from 'helpers/test_constants';
const defaultStore = createStore();
diff --git a/spec/frontend/filterable_list_spec.js b/spec/frontend/filterable_list_spec.js
new file mode 100644
index 00000000000..67d18611661
--- /dev/null
+++ b/spec/frontend/filterable_list_spec.js
@@ -0,0 +1,53 @@
+import FilterableList from '~/filterable_list';
+import { getJSONFixture, setHTMLFixture } from './helpers/fixtures';
+
+describe('FilterableList', () => {
+ let List;
+ let form;
+ let filter;
+ let holder;
+
+ beforeEach(() => {
+ setHTMLFixture(`
+ <form id="project-filter-form">
+ <input name="name" class="js-projects-list-filter" />
+ </div>
+ <div class="js-projects-list-holder"></div>
+ `);
+ getJSONFixture('static/projects.json');
+ form = document.querySelector('form#project-filter-form');
+ filter = document.querySelector('.js-projects-list-filter');
+ holder = document.querySelector('.js-projects-list-holder');
+ List = new FilterableList(form, filter, holder);
+ });
+
+ it('processes input parameters', () => {
+ expect(List.filterForm).toEqual(form);
+ expect(List.listFilterElement).toEqual(filter);
+ expect(List.listHolderElement).toEqual(holder);
+ });
+
+ describe('getPagePath', () => {
+ it('returns properly constructed base endpoint', () => {
+ List.filterForm.action = '/foo/bar/';
+ List.listFilterElement.value = 'blah';
+
+ expect(List.getPagePath()).toEqual('/foo/bar/?name=blah');
+ });
+
+ it('properly appends custom parameters to existing URL', () => {
+ List.filterForm.action = '/foo/bar?alpha=beta';
+ List.listFilterElement.value = 'blah';
+
+ expect(List.getPagePath()).toEqual('/foo/bar?alpha=beta&name=blah');
+ });
+ });
+
+ describe('getFilterEndpoint', () => {
+ it('returns getPagePath by default', () => {
+ jest.spyOn(List, 'getPagePath').mockReturnValue('blah/blah/foo');
+
+ expect(List.getFilterEndpoint()).toEqual(List.getPagePath());
+ });
+ });
+});
diff --git a/spec/frontend/ide/lib/files_spec.js b/spec/frontend/ide/lib/files_spec.js
index aa1fa0373db..08a31318544 100644
--- a/spec/frontend/ide/lib/files_spec.js
+++ b/spec/frontend/ide/lib/files_spec.js
@@ -12,6 +12,7 @@ const createEntries = paths => {
const { name, parent } = splitParent(path);
const parentEntry = acc[parent];
+ const previewMode = viewerInformationForPath(name);
acc[path] = {
...decorateData({
@@ -22,7 +23,8 @@ const createEntries = paths => {
path,
url: createUrl(`/${TEST_PROJECT_ID}/${type}/${TEST_BRANCH_ID}/-/${escapeFileUrl(path)}`),
type,
- previewMode: viewerInformationForPath(path),
+ previewMode,
+ binary: (previewMode && previewMode.binary) || false,
parentPath: parent,
parentTreeUrl: parentEntry
? parentEntry.url
diff --git a/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js b/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js
new file mode 100644
index 00000000000..279ca017b44
--- /dev/null
+++ b/spec/frontend/notes/components/discussion_notes_replies_wrapper_spec.js
@@ -0,0 +1,51 @@
+import { mount } from '@vue/test-utils';
+import DiscussionNotesRepliesWrapper from '~/notes/components/discussion_notes_replies_wrapper.vue';
+
+const TEST_CHILDREN = '<li>Hello!</li><li>World!</li>';
+
+// We have to wrap our SUT in a TestComponent because it's a functional
+// component, so multiple root nodes are possible.
+const TestComponent = {
+ components: { DiscussionNotesRepliesWrapper },
+ template: `<ul><discussion-notes-replies-wrapper v-bind="$attrs">${TEST_CHILDREN}</discussion-notes-replies-wrapper></ul>`,
+};
+
+describe('DiscussionNotesRepliesWrapper', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(TestComponent, {
+ propsData: props,
+ sync: false,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when normal discussion', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders children directly', () => {
+ expect(wrapper.html()).toEqual(`<ul>${TEST_CHILDREN}</ul>`);
+ });
+ });
+
+ describe('when diff discussion', () => {
+ beforeEach(() => {
+ createComponent({
+ isDiffDiscussion: true,
+ });
+ });
+
+ it('wraps children with notes', () => {
+ const notes = wrapper.find('li.discussion-collapsible ul.notes');
+
+ expect(notes.exists()).toBe(true);
+ expect(notes.html()).toEqual(`<ul class="notes">${TEST_CHILDREN}</ul>`);
+ });
+ });
+});
diff --git a/spec/frontend/projects/projects_filterable_list_spec.js b/spec/frontend/projects/projects_filterable_list_spec.js
new file mode 100644
index 00000000000..e756fb3ab56
--- /dev/null
+++ b/spec/frontend/projects/projects_filterable_list_spec.js
@@ -0,0 +1,31 @@
+import ProjectsFilterableList from '~/projects/projects_filterable_list';
+import { getJSONFixture, setHTMLFixture } from '../helpers/fixtures';
+
+describe('ProjectsFilterableList', () => {
+ let List;
+ let form;
+ let filter;
+ let holder;
+
+ beforeEach(() => {
+ setHTMLFixture(`
+ <form id="project-filter-form">
+ <input name="name" class="js-projects-list-filter" />
+ </div>
+ <div class="js-projects-list-holder"></div>
+ `);
+ getJSONFixture('static/projects.json');
+ form = document.querySelector('form#project-filter-form');
+ filter = document.querySelector('.js-projects-list-filter');
+ holder = document.querySelector('.js-projects-list-holder');
+ List = new ProjectsFilterableList(form, filter, holder);
+ });
+
+ describe('getFilterEndpoint', () => {
+ it('updates converts getPagePath for projects', () => {
+ jest.spyOn(List, 'getPagePath').mockReturnValue('blah/projects?');
+
+ expect(List.getFilterEndpoint()).toEqual('blah/projects.json?');
+ });
+ });
+});
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 93b86b9b812..dec6b23d72a 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -143,6 +143,26 @@ describe GitlabSchema do
end
end
+ context 'for classes that are not ActiveRecord subclasses and have implemented .lazy_find' do
+ it 'returns the correct record' do
+ note = create(:discussion_note_on_merge_request)
+
+ result = described_class.object_from_id(note.to_global_id)
+
+ expect(result.__sync).to eq(note)
+ end
+
+ it 'batchloads the queries' do
+ note1 = create(:discussion_note_on_merge_request)
+ note2 = create(:discussion_note_on_merge_request)
+
+ expect do
+ [described_class.object_from_id(note1.to_global_id),
+ described_class.object_from_id(note2.to_global_id)].map(&:__sync)
+ end.not_to exceed_query_limit(1)
+ end
+ end
+
context 'for other classes' do
# We cannot use an anonymous class here as `GlobalID` expects `.name` not
# to return `nil`
diff --git a/spec/graphql/types/diff_refs_type_spec.rb b/spec/graphql/types/diff_refs_type_spec.rb
new file mode 100644
index 00000000000..91017c827ad
--- /dev/null
+++ b/spec/graphql/types/diff_refs_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['DiffRefs'] do
+ it { expect(described_class.graphql_name).to eq('DiffRefs') }
+
+ it { expect(described_class).to have_graphql_fields(:base_sha, :head_sha, :start_sha) }
+end
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index f73bd062369..59bd0123d88 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -13,7 +13,7 @@ describe GitlabSchema.types['MergeRequest'] do
description_html state created_at updated_at source_project target_project
project project_id source_project_id target_project_id source_branch
target_branch work_in_progress merge_when_pipeline_succeeds diff_head_sha
- merge_commit_sha user_notes_count should_remove_source_branch
+ merge_commit_sha user_notes_count should_remove_source_branch diff_refs
force_remove_source_branch merge_status in_progress_merge_commit_sha
merge_error allow_collaboration should_be_rebased rebase_commit_sha
rebase_in_progress merge_commit_message default_merge_commit_message
diff --git a/spec/graphql/types/notes/diff_position_type_spec.rb b/spec/graphql/types/notes/diff_position_type_spec.rb
index 2f8724d7f0d..345bca8f702 100644
--- a/spec/graphql/types/notes/diff_position_type_spec.rb
+++ b/spec/graphql/types/notes/diff_position_type_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe GitlabSchema.types['DiffPosition'] do
it 'exposes the expected fields' do
- expected_fields = [:head_sha, :base_sha, :start_sha, :file_path, :old_path,
+ expected_fields = [:diff_refs, :file_path, :old_path,
:new_path, :position_type, :old_line, :new_line, :x, :y,
:width, :height]
diff --git a/spec/graphql/types/notes/discussion_type_spec.rb b/spec/graphql/types/notes/discussion_type_spec.rb
index 2a1eb0efd35..ba7fc961212 100644
--- a/spec/graphql/types/notes/discussion_type_spec.rb
+++ b/spec/graphql/types/notes/discussion_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
describe GitlabSchema.types['Discussion'] do
- it { is_expected.to have_graphql_fields(:id, :created_at, :notes) }
+ it { is_expected.to have_graphql_fields(:id, :created_at, :notes, :reply_id) }
it { is_expected.to require_graphql_authorizations(:read_note) }
end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index aa0442ab847..94998d302f9 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -16,7 +16,7 @@ describe AvatarsHelper do
shared_examples 'resource with a custom avatar' do |source_type|
it 'returns a custom avatar image' do
expect(public_send("#{source_type}_icon", *helper_args))
- .to eq "<img src=\"#{resource.avatar.url}\" alt=\"Banana sample\" />"
+ .to eq "<img src=\"#{resource.avatar.url}\" />"
end
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 6808ed86c9a..1f236429347 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -29,14 +29,15 @@ describe BlobHelper do
let(:project) { create(:project, :repository, namespace: namespace) }
before do
- allow(self).to receive(:current_user).and_return(nil)
- allow(self).to receive(:can_collaborate_with_project?).and_return(true)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:can?).and_return(true)
+ allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
end
it 'verifies blob is text' do
expect(helper).not_to receive(:blob_text_viewable?)
- button = edit_blob_button(project, 'refs/heads/master', 'README.md')
+ button = helper.edit_blob_button(project, 'refs/heads/master', 'README.md')
expect(button).to start_with('<button')
end
@@ -46,25 +47,25 @@ describe BlobHelper do
expect(project.repository).not_to receive(:blob_at)
- edit_blob_button(project, 'refs/heads/master', 'README.md', blob: blob)
+ helper.edit_blob_button(project, 'refs/heads/master', 'README.md', blob: blob)
end
it 'returns a link with the proper route' do
stub_feature_flags(web_ide_default: false)
- link = edit_blob_button(project, 'master', 'README.md')
+ link = helper.edit_blob_button(project, 'master', 'README.md')
expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/edit/master/README.md")
end
it 'returns a link with a Web IDE route' do
- link = edit_blob_button(project, 'master', 'README.md')
+ link = helper.edit_blob_button(project, 'master', 'README.md')
expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/-/ide/project/#{project.full_path}/edit/master/-/README.md")
end
it 'returns a link with the passed link_opts on the expected route' do
stub_feature_flags(web_ide_default: false)
- link = edit_blob_button(project, 'master', 'README.md', link_opts: { mr_id: 10 })
+ link = helper.edit_blob_button(project, 'master', 'README.md', link_opts: { mr_id: 10 })
expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/edit/master/README.md?mr_id=10")
end
@@ -203,6 +204,13 @@ describe BlobHelper do
describe '#ide_edit_path' do
let(:project) { create(:project) }
+ let(:current_user) { create(:user) }
+ let(:can_push_code) { true }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:can?).and_return(can_push_code)
+ end
around do |example|
old_script_name = Rails.application.routes.default_url_options[:script_name]
@@ -243,5 +251,21 @@ describe BlobHelper do
expect(helper.ide_edit_path(project, "testing/slashes", "readme.md/")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/slashes/-/readme.md/")
end
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it 'returns IDE path inside the project' do
+ expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
+ end
+ end
+
+ context 'when user cannot push to the project' do
+ let(:can_push_code) { false }
+
+ it "returns IDE path with the user's fork" do
+ expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{current_user.namespace.full_path}/#{project.path}/edit/master")
+ end
+ end
end
end
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index b22947911b9..f014537eb54 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -1,10 +1,12 @@
require 'spec_helper'
describe BoardsHelper do
+ set(:project) { create(:project) }
+
describe '#build_issue_link_base' do
context 'project board' do
it 'returns correct path for project board' do
- @project = create(:project)
+ @project = project
@board = create(:board, project: @project)
expect(build_issue_link_base).to eq("/#{@project.namespace.path}/#{@project.path}/issues")
@@ -31,7 +33,6 @@ describe BoardsHelper do
describe '#board_data' do
let(:user) { create(:user) }
- let(:project) { create(:project) }
let(:board) { create(:board, project: project) }
before do
@@ -46,4 +47,15 @@ describe BoardsHelper do
expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(board))
end
end
+
+ describe '#current_board_json' do
+ let(:board_json) { helper.current_board_json }
+
+ it 'can serialise with a basic set of attributes' do
+ board = create(:board, project: project)
+ assign(:board, board)
+
+ expect(board_json).to match_schema('current-board')
+ end
+ end
end
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index e6aacb5b92b..d25f0c6de4a 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -103,7 +103,7 @@ describe EmailsHelper do
appearance = create :appearance, header_logo: fixture_file_upload('spec/fixtures/dk.png')
expect(header_logo).to eq(
- %{<img style="height: 50px" src="/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png" alt="Dk" />}
+ %{<img style="height: 50px" src="/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png" />}
)
end
end
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 6c6410cee9b..f6e1720e113 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -268,7 +268,7 @@ describe MarkupHelper do
end
end
- describe 'markup' do
+ describe '#markup' do
let(:content) { 'Noël' }
it 'preserves encoding' do
@@ -302,6 +302,77 @@ describe MarkupHelper do
end
end
+ describe '#markup_unsafe' do
+ subject { helper.markup_unsafe(file_name, text, context) }
+
+ let(:file_name) { 'foo.bar' }
+ let(:text) { 'Noël' }
+ let(:project_base) { build(:project, :repository) }
+ let(:context) { { project: project_base } }
+
+ context 'when text is missing' do
+ let(:text) { nil }
+
+ it 'returns an empty string' do
+ is_expected.to eq('')
+ end
+ end
+
+ context 'when file is a markdown file' do
+ let(:file_name) { 'foo.md' }
+
+ it 'returns html (rendered by Banzai)' do
+ expected_html = '<p data-sourcepos="1:1-1:5" dir="auto">Noël</p>'
+
+ expect(Banzai).to receive(:render).with(text, context) { expected_html }
+
+ is_expected.to eq(expected_html)
+ end
+
+ context 'when renderer returns an error' do
+ before do
+ allow(Banzai).to receive(:render).and_raise("An error")
+ end
+
+ it 'returns html (rendered by ActionView:TextHelper)' do
+ is_expected.to eq('<p>Noël</p>')
+ end
+ end
+ end
+
+ context 'when file is asciidoc file' do
+ let(:file_name) { 'foo.adoc' }
+
+ it 'returns html (rendered by Gitlab::Asciidoc)' do
+ expected_html = "<div>\n<p>Noël</p>\n</div>"
+
+ expect(Gitlab::Asciidoc).to receive(:render).with(text, context) { expected_html }
+
+ is_expected.to eq(expected_html)
+ end
+ end
+
+ context 'when file is a regular text file' do
+ let(:file_name) { 'foo.txt' }
+
+ it 'returns html (rendered by ActionView::TagHelper)' do
+ is_expected.to eq('<pre class="plain-readme">Noël</pre>')
+ end
+ end
+
+ context 'when file has an unknown type' do
+ let(:file_name) { 'foo' }
+
+ it 'returns html (rendered by Gitlab::OtherMarkup)' do
+ expected_html = 'Noël'
+
+ expect(Gitlab::OtherMarkup).to receive(:render).with(file_name, text, context) { expected_html }
+
+ is_expected.to eq(expected_html)
+ end
+ end
+ end
+
describe '#first_line_in_markdown' do
shared_examples_for 'common markdown examples' do
let(:project_base) { build(:project, :repository) }
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
index f405268d198..5397a47b3dd 100644
--- a/spec/helpers/sorting_helper_spec.rb
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -4,6 +4,11 @@ require 'spec_helper'
describe SortingHelper do
include ApplicationHelper
include IconsHelper
+ include ExploreHelper
+
+ def set_sorting_url(option)
+ allow(self).to receive(:request).and_return(double(path: 'http://test.com', query_parameters: { label_name: option }))
+ end
describe '#issuable_sort_option_title' do
it 'returns correct title for issuable_sort_option_overrides key' do
@@ -21,7 +26,7 @@ describe SortingHelper do
describe '#issuable_sort_direction_button' do
before do
- allow(self).to receive(:request).and_return(double(path: 'http://test.com', query_parameters: { label_name: 'test_label' }))
+ set_sorting_url 'test_label'
end
it 'keeps label filter param' do
@@ -44,4 +49,145 @@ describe SortingHelper do
expect(issuable_sort_direction_button('due_date')).to include('sort-lowest')
end
end
+
+ def stub_controller_path(value)
+ allow(helper.controller).to receive(:controller_path).and_return(value)
+ end
+
+ def project_common_options
+ {
+ sort_value_latest_activity => sort_title_latest_activity,
+ sort_value_recently_created => sort_title_created_date,
+ sort_value_name => sort_title_name,
+ sort_value_stars_desc => sort_title_stars
+ }
+ end
+
+ describe 'with `admin/projects` controller' do
+ before do
+ stub_controller_path 'admin/projects'
+ end
+
+ describe '#projects_sort_options_hash' do
+ it 'returns a hash of available sorting options' do
+ admin_options = project_common_options.merge({
+ sort_value_oldest_activity => sort_title_oldest_activity,
+ sort_value_oldest_created => sort_title_oldest_created,
+ sort_value_recently_created => sort_title_recently_created,
+ sort_value_stars_desc => sort_title_most_stars,
+ sort_value_largest_repo => sort_title_largest_repo
+ })
+
+ expect(projects_sort_options_hash).to eq(admin_options)
+ end
+ end
+ end
+
+ describe 'with `projects` controller' do
+ before do
+ stub_controller_path 'projects'
+ end
+
+ describe '#projects_sort_options_hash' do
+ it 'returns a hash of available sorting options' do
+ expect(projects_sort_options_hash).to include(project_common_options)
+ end
+ end
+
+ describe '#projects_reverse_sort_options_hash' do
+ context 'returns a reversed hash of available sorting options' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:sort_key, :reverse_sort_title) do
+ sort_value_latest_activity | sort_value_oldest_activity
+ sort_value_recently_created | sort_value_oldest_created
+ sort_value_name | sort_value_name_desc
+ sort_value_stars_desc | sort_value_stars_asc
+ sort_value_oldest_activity | sort_value_latest_activity
+ sort_value_oldest_created | sort_value_recently_created
+ sort_value_name_desc | sort_value_name
+ sort_value_stars_asc | sort_value_stars_desc
+ end
+
+ with_them do
+ it do
+ reverse_hash = projects_reverse_sort_options_hash
+
+ expect(reverse_hash).to include(sort_key)
+ expect(reverse_hash[sort_key]).to eq(reverse_sort_title)
+ end
+ end
+ end
+ end
+
+ describe '#project_sort_direction_button' do
+ context 'returns the correct icon for each sort option' do
+ using RSpec::Parameterized::TableSyntax
+
+ sort_lowest_icon = 'sort-lowest'
+ sort_highest_icon = 'sort-highest'
+
+ where(:selected_sort, :icon) do
+ sort_value_latest_activity | sort_highest_icon
+ sort_value_recently_created | sort_highest_icon
+ sort_value_name_desc | sort_highest_icon
+ sort_value_stars_desc | sort_highest_icon
+ sort_value_oldest_activity | sort_lowest_icon
+ sort_value_oldest_created | sort_lowest_icon
+ sort_value_name | sort_lowest_icon
+ sort_value_stars_asc | sort_lowest_icon
+ end
+
+ with_them do
+ it do
+ set_sorting_url selected_sort
+
+ expect(project_sort_direction_button(selected_sort)).to include(icon)
+ end
+ end
+ end
+
+ it 'returns the correct link to reverse the current sort option' do
+ sort_options_links = projects_reverse_sort_options_hash
+
+ sort_options_links.each do |selected_sort, reverse_sort|
+ set_sorting_url selected_sort
+
+ expect(project_sort_direction_button(selected_sort)).to include(reverse_sort)
+ end
+ end
+ end
+
+ describe '#projects_sort_option_titles' do
+ it 'returns a hash of titles for the sorting options' do
+ options = project_common_options.merge({
+ sort_value_oldest_activity => sort_title_latest_activity,
+ sort_value_oldest_created => sort_title_created_date,
+ sort_value_name_desc => sort_title_name,
+ sort_value_stars_asc => sort_title_stars
+ })
+
+ expect(projects_sort_option_titles).to eq(options)
+ end
+ end
+
+ describe 'with project_list_filter_bar off' do
+ before do
+ stub_feature_flags(project_list_filter_bar: false)
+ end
+
+ describe '#projects_sort_options_hash' do
+ it 'returns a hash of available sorting options' do
+ options = project_common_options.merge({
+ sort_value_oldest_activity => sort_title_oldest_activity,
+ sort_value_oldest_created => sort_title_oldest_created,
+ sort_value_recently_created => sort_title_recently_created,
+ sort_value_stars_desc => sort_title_most_stars
+ })
+
+ expect(projects_sort_options_hash).to eq(options)
+ end
+ end
+ end
+ end
end
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index 62c00964524..4bd0fbb76ca 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -31,7 +31,7 @@ describe StorageHelper do
build_artifacts_size: 30.megabytes))
end
- let(:message) { '10 KB repositories, 10 Bytes wikis, 30 MB build artifacts, 20 GB LFS' }
+ let(:message) { 'Repository: 10 KB / Wikis: 10 Bytes / Build Artifacts: 30 MB / LFS: 20 GB' }
it 'works on ProjectStatistics' do
expect(helper.storage_counters_details(project.statistics)).to eq(message)
diff --git a/spec/javascripts/diffs/components/inline_diff_view_spec.js b/spec/javascripts/diffs/components/inline_diff_view_spec.js
index 0b3890b68d6..9b61dbe7975 100644
--- a/spec/javascripts/diffs/components/inline_diff_view_spec.js
+++ b/spec/javascripts/diffs/components/inline_diff_view_spec.js
@@ -10,6 +10,7 @@ describe('InlineDiffView', () => {
let component;
const getDiffFileMock = () => Object.assign({}, diffFileMockData);
const getDiscussionsMockData = () => [Object.assign({}, discussionsMockData)];
+ const notesLength = getDiscussionsMockData()[0].notes.length;
beforeEach(done => {
const diffFile = getDiffFileMock();
@@ -40,7 +41,7 @@ describe('InlineDiffView', () => {
Vue.nextTick(() => {
expect(el.querySelectorAll('.notes_holder').length).toEqual(1);
- expect(el.querySelectorAll('.notes_holder .note-discussion li').length).toEqual(6);
+ expect(el.querySelectorAll('.notes_holder .note').length).toEqual(notesLength + 1);
expect(el.innerText.indexOf('comment 5')).toBeGreaterThan(-1);
component.$store.dispatch('setInitialNotes', []);
diff --git a/spec/javascripts/helpers/vuex_action_helper.js b/spec/javascripts/helpers/vuex_action_helper.js
index 88652202a8e..c5de31a4138 100644
--- a/spec/javascripts/helpers/vuex_action_helper.js
+++ b/spec/javascripts/helpers/vuex_action_helper.js
@@ -89,9 +89,7 @@ export default (
payload,
);
- return new Promise(resolve => {
- setImmediate(resolve);
- })
+ return new Promise(setImmediate)
.then(() => result)
.catch(error => {
validateResults();
diff --git a/spec/javascripts/ide/components/repo_editor_spec.js b/spec/javascripts/ide/components/repo_editor_spec.js
index f832096701f..7dc5cb24981 100644
--- a/spec/javascripts/ide/components/repo_editor_spec.js
+++ b/spec/javascripts/ide/components/repo_editor_spec.js
@@ -30,6 +30,7 @@ describe('RepoEditor', () => {
Vue.set(vm.$store.state.entries, f.path, f);
spyOn(vm, 'getFileData').and.returnValue(Promise.resolve());
+ spyOn(vm, 'getRawFileData').and.returnValue(Promise.resolve());
vm.$mount();
@@ -407,6 +408,44 @@ describe('RepoEditor', () => {
});
});
+ describe('initEditor', () => {
+ beforeEach(() => {
+ spyOn(vm.editor, 'createInstance');
+ spyOnProperty(vm, 'shouldHideEditor').and.returnValue(true);
+ });
+
+    it('is initialised for files without content even if shouldHideEditor is `true`', done => {
+ vm.file.content = '';
+ vm.file.raw = '';
+
+ vm.initEditor();
+ vm.$nextTick()
+ .then(() => {
+ expect(vm.getFileData).toHaveBeenCalled();
+ expect(vm.getRawFileData).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('does not initialize editor for files already with content', done => {
+ expect(vm.getFileData.calls.count()).toEqual(1);
+ expect(vm.getRawFileData.calls.count()).toEqual(1);
+
+ vm.file.content = 'foo';
+
+ vm.initEditor();
+ vm.$nextTick()
+ .then(() => {
+ expect(vm.getFileData.calls.count()).toEqual(1);
+ expect(vm.getRawFileData.calls.count()).toEqual(1);
+ expect(vm.editor.createInstance).not.toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
it('calls removePendingTab when old file is pending', done => {
spyOnProperty(vm, 'shouldHideEditor').and.returnValue(true);
spyOn(vm, 'removePendingTab');
@@ -416,6 +455,7 @@ describe('RepoEditor', () => {
vm.$nextTick()
.then(() => {
vm.file = file('testing');
+            vm.file.content = 'foo'; // needed to prevent a full initEditor cycle
return vm.$nextTick();
})
diff --git a/spec/javascripts/ide/stores/mutations/file_spec.js b/spec/javascripts/ide/stores/mutations/file_spec.js
index 18ee4330f69..7714f66c9a4 100644
--- a/spec/javascripts/ide/stores/mutations/file_spec.js
+++ b/spec/javascripts/ide/stores/mutations/file_spec.js
@@ -83,6 +83,26 @@ describe('IDE store file mutations', () => {
expect(localFile.raw).toBeNull();
expect(localFile.baseRaw).toBeNull();
});
+
+ it('sets extra file data to all arrays concerned', () => {
+ localState.stagedFiles = [localFile];
+ localState.changedFiles = [localFile];
+ localState.openFiles = [localFile];
+
+ const rawPath = 'foo/bar/blah.md';
+
+ mutations.SET_FILE_DATA(localState, {
+ data: {
+ raw_path: rawPath,
+ },
+ file: localFile,
+ });
+
+ expect(localState.stagedFiles[0].rawPath).toEqual(rawPath);
+ expect(localState.changedFiles[0].rawPath).toEqual(rawPath);
+ expect(localState.openFiles[0].rawPath).toEqual(rawPath);
+ expect(localFile.rawPath).toEqual(rawPath);
+ });
});
describe('SET_FILE_RAW_DATA', () => {
diff --git a/spec/javascripts/issuable_spec.js b/spec/javascripts/issuable_spec.js
index 25543053eba..4d57bfb1b33 100644
--- a/spec/javascripts/issuable_spec.js
+++ b/spec/javascripts/issuable_spec.js
@@ -2,14 +2,14 @@ import $ from 'jquery';
import MockAdaptor from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import IssuableIndex from '~/issuable_index';
+import issuableInitBulkUpdateSidebar from '~/issuable_init_bulk_update_sidebar';
describe('Issuable', () => {
- let Issuable;
describe('initBulkUpdate', () => {
it('should not set bulkUpdateSidebar', () => {
- Issuable = new IssuableIndex('issue_');
+ new IssuableIndex('issue_'); // eslint-disable-line no-new
- expect(Issuable.bulkUpdateSidebar).not.toBeDefined();
+ expect(issuableInitBulkUpdateSidebar.bulkUpdateSidebar).toBeNull();
});
it('should set bulkUpdateSidebar', () => {
@@ -17,9 +17,9 @@ describe('Issuable', () => {
element.classList.add('issues-bulk-update');
document.body.appendChild(element);
- Issuable = new IssuableIndex('issue_');
+ new IssuableIndex('issue_'); // eslint-disable-line no-new
- expect(Issuable.bulkUpdateSidebar).toBeDefined();
+ expect(issuableInitBulkUpdateSidebar.bulkUpdateSidebar).toBeDefined();
});
});
@@ -36,7 +36,7 @@ describe('Issuable', () => {
input.setAttribute('id', 'issuable_email');
document.body.appendChild(input);
- Issuable = new IssuableIndex('issue_');
+ new IssuableIndex('issue_'); // eslint-disable-line no-new
mock = new MockAdaptor(axios);
diff --git a/spec/javascripts/monitoring/charts/single_stat_spec.js b/spec/javascripts/monitoring/charts/single_stat_spec.js
index 12b73002f97..127a4a7955a 100644
--- a/spec/javascripts/monitoring/charts/single_stat_spec.js
+++ b/spec/javascripts/monitoring/charts/single_stat_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import SingleStatChart from '~/monitoring/components/charts/single_stat.vue';
+import { graphDataPrometheusQuery } from '../mock_data';
describe('Single Stat Chart component', () => {
let singleStatChart;
@@ -7,9 +8,7 @@ describe('Single Stat Chart component', () => {
beforeEach(() => {
singleStatChart = shallowMount(SingleStatChart, {
propsData: {
- title: 'Time to render',
- value: 1,
- unit: 'sec',
+ graphData: graphDataPrometheusQuery,
},
});
});
@@ -19,9 +18,9 @@ describe('Single Stat Chart component', () => {
});
describe('computed', () => {
- describe('valueWithUnit', () => {
+ describe('engineeringNotation', () => {
it('should interpolate the value and unit props', () => {
- expect(singleStatChart.vm.valueWithUnit).toBe('1sec');
+ expect(singleStatChart.vm.engineeringNotation).toBe('91MB');
});
});
});
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
index 7bbb215475a..85e660d3925 100644
--- a/spec/javascripts/monitoring/mock_data.js
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -935,3 +935,75 @@ export const dashboardGitResponse = [
default: false,
},
];
+
+export const graphDataPrometheusQuery = {
+ title: 'Super Chart A2',
+ type: 'single-stat',
+ weight: 2,
+ metrics: [
+ {
+ id: 'metric_a1',
+ metric_id: 2,
+ query: 'max(go_memstats_alloc_bytes{job="prometheus"}) by (job) /1024/1024',
+ unit: 'MB',
+ label: 'Total Consumption',
+ prometheus_endpoint_path:
+ '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
+ },
+ ],
+ queries: [
+ {
+ metricId: null,
+ id: 'metric_a1',
+ metric_id: 2,
+ query: 'max(go_memstats_alloc_bytes{job="prometheus"}) by (job) /1024/1024',
+ unit: 'MB',
+ label: 'Total Consumption',
+ prometheus_endpoint_path:
+ '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
+ result: [
+ {
+ metric: { job: 'prometheus' },
+ value: ['2019-06-26T21:03:20.881Z', 91],
+ },
+ ],
+ },
+ ],
+};
+
+export const graphDataPrometheusQueryRange = {
+ title: 'Super Chart A1',
+ type: 'area',
+ weight: 2,
+ metrics: [
+ {
+ id: 'metric_a1',
+ metric_id: 2,
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
+ unit: 'MB',
+ label: 'Total Consumption',
+ prometheus_endpoint_path:
+ '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
+ },
+ ],
+ queries: [
+ {
+ metricId: null,
+ id: 'metric_a1',
+ metric_id: 2,
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
+ unit: 'MB',
+ label: 'Total Consumption',
+ prometheus_endpoint_path:
+ '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
+ result: [
+ {
+ metric: {},
+ values: [[1495700554.925, '8.0390625'], [1495700614.925, '8.0390625']],
+ },
+ ],
+ },
+ ],
+};
diff --git a/spec/javascripts/monitoring/store/mutations_spec.js b/spec/javascripts/monitoring/store/mutations_spec.js
index 91580366531..43776b1b7f2 100644
--- a/spec/javascripts/monitoring/store/mutations_spec.js
+++ b/spec/javascripts/monitoring/store/mutations_spec.js
@@ -115,12 +115,14 @@ describe('Monitoring mutations', () => {
environmentsEndpoint: 'environments.json',
deploymentsEndpoint: 'deployments.json',
dashboardEndpoint: 'dashboard.json',
+ projectPath: '/gitlab-org/gitlab-ce',
});
expect(stateCopy.metricsEndpoint).toEqual('additional_metrics.json');
expect(stateCopy.environmentsEndpoint).toEqual('environments.json');
expect(stateCopy.deploymentsEndpoint).toEqual('deployments.json');
expect(stateCopy.dashboardEndpoint).toEqual('dashboard.json');
+ expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-ce');
});
});
diff --git a/spec/javascripts/monitoring/utils_spec.js b/spec/javascripts/monitoring/utils_spec.js
index e3c455d1686..5570d57b8b2 100644
--- a/spec/javascripts/monitoring/utils_spec.js
+++ b/spec/javascripts/monitoring/utils_spec.js
@@ -1,5 +1,6 @@
-import { getTimeDiff } from '~/monitoring/utils';
+import { getTimeDiff, graphDataValidatorForValues } from '~/monitoring/utils';
import { timeWindows } from '~/monitoring/constants';
+import { graphDataPrometheusQuery, graphDataPrometheusQueryRange } from './mock_data';
describe('getTimeDiff', () => {
it('defaults to an 8 hour (28800s) difference', () => {
@@ -27,3 +28,27 @@ describe('getTimeDiff', () => {
});
});
});
+
+describe('graphDataValidatorForValues', () => {
+ /*
+ * When dealing with a metric using the query format, e.g.
+ * query: 'max(go_memstats_alloc_bytes{job="prometheus"}) by (job) /1024/1024'
+ * the validator will look for the `value` key instead of `values`
+ */
+ it('validates data with the query format', () => {
+ const validGraphData = graphDataValidatorForValues(true, graphDataPrometheusQuery);
+
+ expect(validGraphData).toBe(true);
+ });
+
+ /*
+   * When dealing with a metric using the query_range format, e.g.
+ * query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
+ * the validator will look for the `values` key instead of `value`
+ */
+ it('validates data with the query_range format', () => {
+ const validGraphData = graphDataValidatorForValues(false, graphDataPrometheusQueryRange);
+
+ expect(validGraphData).toBe(true);
+ });
+});
diff --git a/spec/javascripts/notes/components/comment_form_spec.js b/spec/javascripts/notes/components/comment_form_spec.js
index 362963ddaf4..88c86746992 100644
--- a/spec/javascripts/notes/components/comment_form_spec.js
+++ b/spec/javascripts/notes/components/comment_form_spec.js
@@ -251,6 +251,21 @@ describe('issue_comment_form component', () => {
});
});
});
+
+ describe('when toggling state', () => {
+ it('should update MR count', done => {
+ spyOn(vm, 'closeIssue').and.returnValue(Promise.resolve());
+
+ const updateMrCountSpy = spyOnDependency(CommentForm, 'refreshUserMergeRequestCounts');
+ vm.toggleIssueState();
+
+ Vue.nextTick(() => {
+ expect(updateMrCountSpy).toHaveBeenCalled();
+
+ done();
+ });
+ });
+ });
});
describe('issue is confidential', () => {
diff --git a/spec/javascripts/notes/stores/actions_spec.js b/spec/javascripts/notes/stores/actions_spec.js
index 65f72a135aa..c461c28a37b 100644
--- a/spec/javascripts/notes/stores/actions_spec.js
+++ b/spec/javascripts/notes/stores/actions_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import $ from 'jquery';
import _ from 'underscore';
+import Api from '~/api';
import { TEST_HOST } from 'spec/test_constants';
import { headersInterceptor } from 'spec/helpers/vue_resource_helper';
import actionsModule, * as actions from '~/notes/stores/actions';
@@ -8,7 +9,6 @@ import * as mutationTypes from '~/notes/stores/mutation_types';
import * as notesConstants from '~/notes/constants';
import createStore from '~/notes/stores';
import mrWidgetEventHub from '~/vue_merge_request_widget/event_hub';
-import service from '~/notes/services/notes_service';
import testAction from '../../helpers/vuex_action_helper';
import { resetStore } from '../helpers';
import {
@@ -18,6 +18,8 @@ import {
noteableDataMock,
individualNote,
} from '../mock_data';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
const TEST_ERROR_MESSAGE = 'Test error message';
@@ -335,28 +337,24 @@ describe('Actions Notes Store', () => {
});
describe('deleteNote', () => {
- const interceptor = (request, next) => {
- next(
- request.respondWith(JSON.stringify({}), {
- status: 200,
- }),
- );
- };
+ const endpoint = `${TEST_HOST}/note`;
+ let axiosMock;
beforeEach(() => {
- Vue.http.interceptors.push(interceptor);
+ axiosMock = new AxiosMockAdapter(axios);
+ axiosMock.onDelete(endpoint).replyOnce(200, {});
$('body').attr('data-page', '');
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ axiosMock.restore();
$('body').attr('data-page', '');
});
it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', done => {
- const note = { path: `${gl.TEST_HOST}`, id: 1 };
+ const note = { path: endpoint, id: 1 };
testAction(
actions.deleteNote,
@@ -381,7 +379,7 @@ describe('Actions Notes Store', () => {
});
it('dispatches removeDiscussionsFromDiff on merge request page', done => {
- const note = { path: `${gl.TEST_HOST}`, id: 1 };
+ const note = { path: endpoint, id: 1 };
$('body').attr('data-page', 'projects:merge_requests:show');
@@ -846,9 +844,9 @@ describe('Actions Notes Store', () => {
let flashContainer;
beforeEach(() => {
- spyOn(service, 'applySuggestion');
+ spyOn(Api, 'applySuggestion');
dispatch.and.returnValue(Promise.resolve());
- service.applySuggestion.and.returnValue(Promise.resolve());
+ Api.applySuggestion.and.returnValue(Promise.resolve());
flashContainer = {};
});
@@ -877,7 +875,7 @@ describe('Actions Notes Store', () => {
it('when service fails, flashes error message', done => {
const response = { response: { data: { message: TEST_ERROR_MESSAGE } } };
- service.applySuggestion.and.returnValue(Promise.reject(response));
+ Api.applySuggestion.and.returnValue(Promise.reject(response));
testSubmitSuggestion(done, () => {
expect(commit).not.toHaveBeenCalled();
diff --git a/spec/javascripts/registry/components/collapsible_container_spec.js b/spec/javascripts/registry/components/collapsible_container_spec.js
index 55017b3e26b..2a5d8dd11da 100644
--- a/spec/javascripts/registry/components/collapsible_container_spec.js
+++ b/spec/javascripts/registry/components/collapsible_container_spec.js
@@ -77,7 +77,7 @@ describe('collapsible registry container', () => {
spyOn(vm, 'deleteItem').and.returnValue(Promise.resolve());
Vue.nextTick(() => {
- document.querySelector('#confirm-repo-deletion-modal .btn-danger').click();
+ document.querySelector(`#${vm.modalId} .btn-danger`).click();
expect(vm.deleteItem).toHaveBeenCalledWith(vm.repo);
done();
diff --git a/spec/javascripts/registry/components/table_registry_spec.js b/spec/javascripts/registry/components/table_registry_spec.js
index 6a0b16f592e..31ac970378e 100644
--- a/spec/javascripts/registry/components/table_registry_spec.js
+++ b/spec/javascripts/registry/components/table_registry_spec.js
@@ -51,7 +51,7 @@ describe('table registry', () => {
spyOn(vm, 'deleteItem').and.returnValue(Promise.resolve());
Vue.nextTick(() => {
- document.querySelector('#confirm-image-deletion-modal .btn-danger').click();
+ document.querySelector(`#${vm.modalId} .btn-danger`).click();
expect(vm.deleteItem).toHaveBeenCalledWith(firstImage);
expect(vm.itemToBeDeleted).toBeNull();
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index 2cc476ed52a..ce453d7c483 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -3,12 +3,12 @@
*/
import $ from 'jquery';
+import 'core-js/features/set-immediate';
import 'vendor/jasmine-jquery';
import '~/commons';
import Vue from 'vue';
import VueResource from 'vue-resource';
import Translate from '~/vue_shared/translate';
-import CheckEE from '~/vue_shared/mixins/is_ee';
import jasmineDiff from 'jasmine-diff';
import { config as testUtilsConfig } from '@vue/test-utils';
@@ -48,7 +48,6 @@ Vue.config.errorHandler = function(err) {
Vue.use(VueResource);
Vue.use(Translate);
-Vue.use(CheckEE);
// enable test fixtures
jasmine.getFixtures().fixturesPath = FIXTURES_PATH;
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index bb76616be56..ba3ba01944d 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -58,9 +58,11 @@ const createComponent = (customConfig = {}) => {
describe('ReadyToMerge', () => {
let vm;
+ let updateMrCountSpy;
beforeEach(() => {
vm = createComponent();
+ updateMrCountSpy = spyOnDependency(ReadyToMerge, 'refreshUserMergeRequestCounts');
});
afterEach(() => {
@@ -461,6 +463,7 @@ describe('ReadyToMerge', () => {
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
expect(eventHub.$emit).toHaveBeenCalledWith('FetchActionsContent');
expect(vm.initiateRemoveSourceBranchPolling).toHaveBeenCalled();
+ expect(updateMrCountSpy).toHaveBeenCalled();
expect(cpc).toBeFalsy();
expect(spc).toBeTruthy();
diff --git a/spec/lib/api/helpers/pagination_spec.rb b/spec/lib/api/helpers/pagination_spec.rb
index c788da55cd2..b0a00392957 100644
--- a/spec/lib/api/helpers/pagination_spec.rb
+++ b/spec/lib/api/helpers/pagination_spec.rb
@@ -114,7 +114,7 @@ describe API::Helpers::Pagination do
expect(paginated_relation.order_values).to be_present
expect(paginated_relation.order_values.size).to eq(1)
expect(paginated_relation.order_values.first).to be_descending
- expect(paginated_relation.order_values.first.expr.name).to eq :id
+ expect(paginated_relation.order_values.first.expr.name).to eq 'id'
end
end
@@ -151,9 +151,9 @@ describe API::Helpers::Pagination do
expect(paginated_relation.order_values).to be_present
expect(paginated_relation.order_values.size).to eq(2)
expect(paginated_relation.order_values.first).to be_descending
- expect(paginated_relation.order_values.first.expr.name).to eq :name
+ expect(paginated_relation.order_values.first.expr.name).to eq 'name'
expect(paginated_relation.order_values.second).to be_descending
- expect(paginated_relation.order_values.second.expr.name).to eq :id
+ expect(paginated_relation.order_values.second.expr.name).to eq 'id'
end
it 'returns the right records (first page)' do
@@ -341,7 +341,7 @@ describe API::Helpers::Pagination do
expect(resource.order_values).to be_empty
expect(paginated_relation.order_values).to be_present
expect(paginated_relation.order_values.first).to be_ascending
- expect(paginated_relation.order_values.first.expr.name).to eq :id
+ expect(paginated_relation.order_values.first.expr.name).to eq 'id'
end
it 'is present it does not add anything' do
@@ -349,7 +349,7 @@ describe API::Helpers::Pagination do
expect(paginated_relation.order_values).to be_present
expect(paginated_relation.order_values.first).to be_descending
- expect(paginated_relation.order_values.first.expr.name).to eq :created_at
+ expect(paginated_relation.order_values.first.expr.name).to eq 'created_at'
end
end
end
diff --git a/spec/lib/banzai/filter/inline_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
new file mode 100644
index 00000000000..772c94e3180
--- /dev/null
+++ b/spec/lib/banzai/filter/inline_metrics_filter_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::InlineMetricsFilter do
+ include FilterSpecHelper
+
+ let(:input) { %(<a href="#{url}">example</a>) }
+ let(:doc) { filter(input) }
+
+ context 'when the document has an external link' do
+ let(:url) { 'https://foo.com' }
+
+ it 'leaves regular non-metrics links unchanged' do
+ expect(doc.to_s).to eq input
+ end
+ end
+
+ context 'when the document has a metrics dashboard link' do
+ let(:params) { ['foo', 'bar', 12] }
+ let(:url) { urls.metrics_namespace_project_environment_url(*params) }
+
+ it 'leaves the original link unchanged' do
+ expect(doc.at_css('a').to_s).to eq input
+ end
+
+ it 'appends a metrics charts placeholder with dashboard url after metrics links' do
+ node = doc.at_css('.js-render-metrics')
+ expect(node).to be_present
+
+ dashboard_url = urls.metrics_dashboard_namespace_project_environment_url(*params, embedded: true)
+ expect(node.attribute('data-dashboard-url').to_s).to eq dashboard_url
+ end
+
+ context 'when the metrics dashboard link is part of a paragraph' do
+ let(:paragraph) { %(This is an <a href="#{url}">example</a> of metrics.) }
+ let(:input) { %(<p>#{paragraph}</p>) }
+
+ it 'appends the charts placeholder after the enclosing paragraph' do
+ expect(doc.at_css('p').to_s).to include paragraph
+ expect(doc.at_css('.js-render-metrics')).to be_present
+ end
+
+ context 'when the feature is disabled' do
+ before do
+ stub_feature_flags(gfm_embedded_metrics: false)
+ end
+
+ it 'does nothing' do
+ expect(doc.to_s).to eq input
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
new file mode 100644
index 00000000000..fb2186e9d12
--- /dev/null
+++ b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::InlineMetricsRedactorFilter do
+ include FilterSpecHelper
+
+ set(:project) { create(:project) }
+
+ let(:url) { urls.metrics_dashboard_project_environment_url(project, 1, embedded: true) }
+ let(:input) { %(<a href="#{url}">example</a>) }
+ let(:doc) { filter(input) }
+
+ context 'when the feature is disabled' do
+ before do
+ stub_feature_flags(gfm_embedded_metrics: false)
+ end
+
+ it 'does nothing' do
+ expect(doc.to_s).to eq input
+ end
+ end
+
+ context 'without a metrics charts placeholder' do
+ it 'leaves regular non-metrics links unchanged' do
+ expect(doc.to_s).to eq input
+ end
+ end
+
+ context 'with a metrics charts placeholder' do
+ let(:input) { %(<div class="js-render-metrics" data-dashboard-url="#{url}"></div>) }
+
+ context 'no user is logged in' do
+ it 'redacts the placeholder' do
+ expect(doc.to_s).to be_empty
+ end
+ end
+
+    context 'the user does not have permission to see charts' do
+ let(:doc) { filter(input, current_user: build(:user)) }
+
+ it 'redacts the placeholder' do
+ expect(doc.to_s).to be_empty
+ end
+ end
+
+ context 'the user has requisite permissions' do
+ let(:user) { create(:user) }
+ let(:doc) { filter(input, current_user: user) }
+
+ it 'leaves the placeholder' do
+ project.add_maintainer(user)
+
+ expect(doc.to_s).to eq input
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb
index aa828e2f0e9..a099f7482c1 100644
--- a/spec/lib/banzai/renderer_spec.rb
+++ b/spec/lib/banzai/renderer_spec.rb
@@ -19,6 +19,24 @@ describe Banzai::Renderer do
object
end
+ describe '#cache_collection_render' do
+ let(:merge_request) { fake_object(fresh: true) }
+ let(:context) { { cache_key: [merge_request, 'field'], rendered: merge_request.field_html } }
+
+ context 'when an item has a rendered field' do
+ before do
+ allow(merge_request).to receive(:field).and_return('This is the field')
+ allow(merge_request).to receive(:field_html).and_return('This is the field')
+ end
+
+ it 'does not touch redis if the field is in the cache' do
+ expect(Rails).not_to receive(:cache)
+
+ described_class.cache_collection_render([{ text: merge_request.field, context: context }])
+ end
+ end
+ end
+
describe '#render_field' do
let(:renderer) { described_class }
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 8f2434acd26..ff002acbd35 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -45,28 +45,117 @@ module Gitlab
end
context "XSS" do
- links = {
- 'links' => {
+ items = {
+ 'link with extra attribute' => {
input: 'link:mylink"onmouseover="alert(1)[Click Here]',
output: "<div>\n<p><a href=\"mylink\">Click Here</a></p>\n</div>"
},
- 'images' => {
+ 'link with unsafe scheme' => {
+ input: 'link:data://danger[Click Here]',
+ output: "<div>\n<p><a>Click Here</a></p>\n</div>"
+ },
+ 'image with onerror' => {
input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]',
output: "<div>\n<p><span><img src=\"https://localhost.com/image.png\" alt='Alt text\" onerror=\"alert(7)'></span></p>\n</div>"
},
- 'pre' => {
+ 'fenced code with inline script' => {
input: '```mypre"><script>alert(3)</script>',
output: "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n</div>\n</div>"
}
}
- links.each do |name, data|
+ items.each do |name, data|
it "does not convert dangerous #{name} into HTML" do
expect(render(data[:input], context)).to include(data[:output])
end
end
end
+ context 'with admonition' do
+ it 'preserves classes' do
+ input = <<~ADOC
+ NOTE: An admonition paragraph, like this note, grabs the reader’s attention.
+ ADOC
+
+ output = <<~HTML
+ <div class="admonitionblock">
+ <table>
+ <tr>
+ <td class="icon">
+ <i class="fa icon-note" title="Note"></i>
+ </td>
+ <td>
+ An admonition paragraph, like this note, grabs the reader’s attention.
+ </td>
+ </tr>
+ </table>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
+
+ context 'with checklist' do
+ it 'preserves classes' do
+ input = <<~ADOC
+ * [x] checked
+ * [ ] not checked
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <ul class="checklist">
+ <li>
+ <p><i class="fa fa-check-square-o"></i> checked</p>
+ </li>
+ <li>
+ <p><i class="fa fa-square-o"></i> not checked</p>
+ </li>
+ </ul>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
+
+ context 'with marks' do
+ it 'preserves classes' do
+ input = <<~ADOC
+ Werewolves are allergic to #cassia cinnamon#.
+
+ Did the werewolves read the [.small]#small print#?
+
+ Where did all the [.underline.small]#cores# run off to?
+
+ We need [.line-through]#ten# make that twenty VMs.
+
+ [.big]##O##nce upon an infinite loop.
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <p>Werewolves are allergic to <mark>cassia cinnamon</mark>.</p>
+ </div>
+ <div>
+ <p>Did the werewolves read the <span class="small">small print</span>?</p>
+ </div>
+ <div>
+ <p>Where did all the <span class="underline small">cores</span> run off to?</p>
+ </div>
+ <div>
+ <p>We need <span class="line-through">ten</span> make that twenty VMs.</p>
+ </div>
+ <div>
+ <p><span class="big">O</span>nce upon an infinite loop.</p>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
+
context 'with fenced block' do
it 'highlights syntax' do
input = <<~ADOC
diff --git a/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
new file mode 100644
index 00000000000..5938ecca459
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_user_namespace_names_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::FixUserNamespaceNames, :migration, schema: 20190620112608 do
+ let(:namespaces) { table(:namespaces) }
+ let(:users) { table(:users) }
+ let(:user) { users.create(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
+
+ context 'updating the namespace names' do
+ it 'updates a user namespace within range' do
+ user2 = users.create(name: "Other user's full name", projects_limit: 10, username: 'also-not-null', email: '2')
+ user_namespace1 = namespaces.create(
+ id: 2,
+ owner_id: user.id,
+ name: "Should be the user's name",
+ path: user.username
+ )
+ user_namespace2 = namespaces.create(
+ id: 3,
+ owner_id: user2.id,
+ name: "Should also be the user's name",
+ path: user.username
+ )
+
+ described_class.new.perform(1, 5)
+
+ expect(user_namespace1.reload.name).to eq("The user's full name")
+ expect(user_namespace2.reload.name).to eq("Other user's full name")
+ end
+
+ it 'does not update namespaces out of range' do
+ user_namespace = namespaces.create(
+ id: 6,
+ owner_id: user.id,
+ name: "Should be the user's name",
+ path: user.username
+ )
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { user_namespace.reload.name }
+ end
+
+ it 'does not update groups owned by the users' do
+ user_group = namespaces.create(
+ id: 2,
+ owner_id: user.id,
+ name: 'A group name',
+ path: 'the-path',
+ type: 'Group'
+ )
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { user_group.reload.name }
+ end
+ end
+
+ context 'namespace route names' do
+ let(:routes) { table(:routes) }
+ let(:namespace) do
+ namespaces.create(
+ id: 2,
+ owner_id: user.id,
+ name: "Will be updated to the user's name",
+ path: user.username
+ )
+ end
+
+ it "updates the route name if it didn't match the namespace" do
+ route = routes.create(path: namespace.path, name: 'Incorrect name', source_type: 'Namespace', source_id: namespace.id)
+
+ described_class.new.perform(1, 5)
+
+ expect(route.reload.name).to eq("The user's full name")
+ end
+
+    it 'updates the route name if it was nil' do
+ route = routes.create(path: namespace.path, name: nil, source_type: 'Namespace', source_id: namespace.id)
+
+ described_class.new.perform(1, 5)
+
+ expect(route.reload.name).to eq("The user's full name")
+ end
+
+ it "doesn't update group routes" do
+ route = routes.create(path: 'group-path', name: 'Group name', source_type: 'Group', source_id: namespace.id)
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { route.reload.name }
+ end
+
+ it "doesn't touch routes for namespaces out of range" do
+ user_namespace = namespaces.create(
+ id: 6,
+ owner_id: user.id,
+ name: "Should be the user's name",
+ path: user.username
+ )
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { user_namespace.reload.name }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
new file mode 100644
index 00000000000..d1d6d8411d1
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_user_project_route_names_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::FixUserProjectRouteNames, :migration, schema: 20190620112608 do
+ let(:namespaces) { table(:namespaces) }
+ let(:users) { table(:users) }
+ let(:routes) { table(:routes) }
+ let(:projects) { table(:projects) }
+
+ let(:user) { users.create(name: "The user's full name", projects_limit: 10, username: 'not-null', email: '1') }
+
+ let(:namespace) do
+ namespaces.create(
+ owner_id: user.id,
+ name: "Should eventually be the user's name",
+ path: user.username
+ )
+ end
+
+ let(:project) do
+ projects.create(namespace_id: namespace.id, name: 'Project Name')
+ end
+
+ it "updates the route for a project if it did not match the user's name" do
+ route = routes.create(
+ id: 1,
+ path: "#{user.username}/#{project.path}",
+ source_id: project.id,
+ source_type: 'Project',
+ name: 'Completely wrong'
+ )
+
+ described_class.new.perform(1, 5)
+
+ expect(route.reload.name).to eq("The user's full name / Project Name")
+ end
+
+ it 'updates the route for a project if the name was nil' do
+ route = routes.create(
+ id: 1,
+ path: "#{user.username}/#{project.path}",
+ source_id: project.id,
+ source_type: 'Project',
+ name: nil
+ )
+
+ described_class.new.perform(1, 5)
+
+ expect(route.reload.name).to eq("The user's full name / Project Name")
+ end
+
+  it 'does not update routes that are out of the range' do
+ route = routes.create(
+ id: 6,
+ path: "#{user.username}/#{project.path}",
+ source_id: project.id,
+ source_type: 'Project',
+ name: 'Completely wrong'
+ )
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { route.reload.name }
+ end
+
+ it 'does not update routes for projects in groups owned by the user' do
+ group = namespaces.create(
+ owner_id: user.id,
+ name: 'A group',
+ path: 'a-path',
+ type: ''
+ )
+ project = projects.create(namespace_id: group.id, name: 'Project Name')
+ route = routes.create(
+ id: 1,
+ path: "#{group.path}/#{project.path}",
+ source_id: project.id,
+ source_type: 'Project',
+ name: 'Completely wrong'
+ )
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { route.reload.name }
+ end
+
+ it 'does not update routes for namespaces' do
+ route = routes.create(
+ id: 1,
+ path: namespace.path,
+ source_id: namespace.id,
+ source_type: 'Namespace',
+ name: 'Completely wrong'
+ )
+
+ expect { described_class.new.perform(1, 5) }
+ .not_to change { route.reload.name }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
index 4a81a37d341..ad4fa4fe03a 100644
--- a/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_merge_request_assignees_table_spec.rb
@@ -27,14 +27,19 @@ describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, :migra
merge_requests.create(params)
end
+ before do
+ create_merge_request(2, assignee_id: user.id)
+ create_merge_request(3, assignee_id: user_2.id)
+ create_merge_request(4, assignee_id: user_3.id)
+
+ # Test filtering MRs without assignees
+ create_merge_request(5, assignee_id: nil)
+ # Test filtering already migrated row
+ merge_request_assignees.create!(merge_request_id: 2, user_id: user_3.id)
+ end
+
describe '#perform' do
it 'creates merge_request_assignees rows according to merge_requests' do
- create_merge_request(2, assignee_id: user.id)
- create_merge_request(3, assignee_id: user_2.id)
- create_merge_request(4, assignee_id: user_3.id)
- # Test filtering already migrated row
- merge_request_assignees.create!(merge_request_id: 2, user_id: user_3.id)
-
subject.perform(1, 4)
rows = merge_request_assignees.order(:id).map { |row| row.attributes.slice('merge_request_id', 'user_id') }
@@ -53,4 +58,13 @@ describe Gitlab::BackgroundMigration::PopulateMergeRequestAssigneesTable, :migra
end
end
end
+
+ describe '#perform_all_sync' do
+    it 'executes perform for all merge requests in batches' do
+ expect(subject).to receive(:perform).with(2, 4).ordered
+ expect(subject).to receive(:perform).with(5, 5).ordered
+
+ subject.perform_all_sync(batch_size: 3)
+ end
+ end
end
diff --git a/spec/lib/gitlab/batch_pop_queueing_spec.rb b/spec/lib/gitlab/batch_pop_queueing_spec.rb
new file mode 100644
index 00000000000..28984d52024
--- /dev/null
+++ b/spec/lib/gitlab/batch_pop_queueing_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BatchPopQueueing do
+ include ExclusiveLeaseHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#initialize' do
+ where(:namespace, :queue_id, :expect_error, :error_type) do
+ 'feature' | '1' | false | nil
+ :feature | '1' | false | nil
+ nil | '1' | true | NoMethodError
+ 'feature' | nil | true | NoMethodError
+ '' | '1' | true | ArgumentError
+ 'feature' | '' | true | ArgumentError
+ 'feature' | 1 | true | NoMethodError
+ end
+
+ with_them do
+ it do
+ if expect_error
+ expect { described_class.new(namespace, queue_id) }.to raise_error(error_type)
+ else
+ expect { described_class.new(namespace, queue_id) }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ describe '#safe_execute', :clean_gitlab_redis_queues do
+ subject { queue.safe_execute(new_items, lock_timeout: lock_timeout) }
+
+ let(:queue) { described_class.new(namespace, queue_id) }
+ let(:namespace) { 'feature' }
+ let(:queue_id) { '1' }
+ let(:lock_timeout) { 10.minutes }
+ let(:new_items) { %w[A B] }
+ let(:lock_key) { queue.send(:lock_key) }
+ let(:queue_key) { queue.send(:queue_key) }
+
+ it 'enqueues new items always' do
+ Gitlab::Redis::Queues.with do |redis|
+ expect(redis).to receive(:sadd).with(queue_key, new_items)
+ expect(redis).to receive(:expire).with(queue_key, (lock_timeout + described_class::EXTRA_QUEUE_EXPIRE_WINDOW).to_i)
+ end
+
+ subject
+ end
+
+ it 'yields the new items with exclusive lease' do
+ uuid = 'test'
+ expect_to_obtain_exclusive_lease(lock_key, uuid, timeout: lock_timeout)
+ expect_to_cancel_exclusive_lease(lock_key, uuid)
+
+ expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
+ .to yield_with_args(match_array(new_items))
+ end
+
+ it 'returns the result and no items in the queue' do
+ expect(subject[:status]).to eq(:finished)
+ expect(subject[:new_items]).to be_empty
+
+ Gitlab::Redis::Queues.with do |redis|
+ expect(redis.llen(queue_key)).to be(0)
+ end
+ end
+
+ context 'when new items are enqueued during the process' do
+ it 'returns the result with newly added items' do
+ result = queue.safe_execute(new_items) do
+ queue.safe_execute(['C'])
+ end
+
+ expect(result[:status]).to eq(:finished)
+ expect(result[:new_items]).to eq(['C'])
+
+ Gitlab::Redis::Queues.with do |redis|
+ expect(redis.scard(queue_key)).to be(1)
+ end
+ end
+ end
+
+    context 'when integer items are enqueued' do
+ let(:new_items) { [1, 2, 3] }
+
+ it 'yields as String values' do
+ expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
+ .to yield_with_args(%w[1 2 3])
+ end
+ end
+
+ context 'when the queue key does not exist in Redis' do
+ before do
+ allow(queue).to receive(:enqueue) { }
+ end
+
+ it 'yields empty array' do
+ expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
+ .to yield_with_args([])
+ end
+ end
+
+ context 'when the other process has already been working on the queue' do
+ before do
+ stub_exclusive_lease_taken(lock_key, timeout: lock_timeout)
+ end
+
+ it 'does not yield the block' do
+ expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
+ .not_to yield_control
+ end
+
+ it 'returns the result' do
+ expect(subject[:status]).to eq(:enqueued)
+ end
+ end
+
+ context 'when a duplicate item is enqueued' do
+      it 'returns the popped items to the queue and raises an error' do
+ expect { |b| queue.safe_execute(%w[1 1 2 2], &b) }
+ .to yield_with_args(match_array(%w[1 2]))
+ end
+ end
+
+ context 'when there are two queues' do
+ it 'enqueues items to each queue' do
+ queue_1 = described_class.new(namespace, '1')
+ queue_2 = described_class.new(namespace, '2')
+
+ result_2 = nil
+
+ result_1 = queue_1.safe_execute(['A']) do |_|
+ result_2 = queue_2.safe_execute(['B']) do |_|
+ queue_1.safe_execute(['C'])
+ queue_2.safe_execute(['D'])
+ end
+ end
+
+ expect(result_1[:status]).to eq(:finished)
+ expect(result_1[:new_items]).to eq(['C'])
+ expect(result_2[:status]).to eq(:finished)
+ expect(result_2[:new_items]).to eq(['D'])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
index 483c5ea9cff..972dd7e0d2b 100644
--- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
+++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
@@ -26,6 +26,7 @@ describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
end
it 'loads 10 projects without hitting Gitaly call limit', :request_store do
+ allow(Gitlab::GitalyClient).to receive(:can_access_disk?).and_return(false)
projects = Gitlab::GitalyClient.allow_n_plus_1_calls do
(1..10).map { create(:project, :repository) }
end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index 909dbffa38f..db31e5280a3 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -80,7 +80,7 @@ describe Gitlab::CurrentSettings do
# during the initialization phase of the test suite, so instead let's mock the internals of it
expect(ActiveRecord::Base.connection).not_to receive(:active?)
expect(ActiveRecord::Base.connection).not_to receive(:cached_table_exists?)
- expect(ActiveRecord::Migrator).not_to receive(:needs_migration?)
+ expect_any_instance_of(ActiveRecord::MigrationContext).not_to receive(:needs_migration?)
expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
end
end
@@ -109,7 +109,7 @@ describe Gitlab::CurrentSettings do
context 'with pending migrations' do
before do
- expect(ActiveRecord::Migrator).to receive(:needs_migration?).and_return(true)
+ expect_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(true)
end
shared_examples 'a non-persisted ApplicationSetting object' do
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index f8b103c0fab..5ee02650e49 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -7,7 +7,7 @@ describe 'cycle analytics events' do
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
let(:events) do
- CycleAnalytics.new(project, { from: from_date, current_user: user })[stage].events
+ CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user })[stage].events
end
before do
diff --git a/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
index eacde22cd56..8633a63849f 100644
--- a/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
@@ -3,6 +3,40 @@ require 'lib/gitlab/cycle_analytics/shared_stage_spec'
describe Gitlab::CycleAnalytics::TestStage do
let(:stage_name) { :test }
+ let(:project) { create(:project) }
+ let(:stage) { described_class.new(project: project, options: { from: 2.days.ago, current_user: project.creator }) }
it_behaves_like 'base stage'
+
+ describe '#median' do
+ before do
+ issue_1 = create(:issue, project: project, created_at: 90.minutes.ago)
+ issue_2 = create(:issue, project: project, created_at: 60.minutes.ago)
+ issue_3 = create(:issue, project: project, created_at: 60.minutes.ago)
+ mr_1 = create(:merge_request, :closed, source_project: project, created_at: 60.minutes.ago)
+ mr_2 = create(:merge_request, :closed, source_project: project, created_at: 40.minutes.ago, source_branch: 'A')
+ mr_3 = create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'B')
+ mr_4 = create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'C')
+ mr_5 = create(:merge_request, source_project: project, created_at: 10.minutes.ago, source_branch: 'D')
+ mr_1.metrics.update!(latest_build_started_at: 32.minutes.ago, latest_build_finished_at: 2.minutes.ago)
+ mr_2.metrics.update!(latest_build_started_at: 62.minutes.ago, latest_build_finished_at: 32.minutes.ago)
+ mr_3.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
+ mr_4.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
+ mr_5.metrics.update!(latest_build_started_at: nil, latest_build_finished_at: nil)
+
+ create(:merge_requests_closing_issues, merge_request: mr_1, issue: issue_1)
+ create(:merge_requests_closing_issues, merge_request: mr_2, issue: issue_2)
+ create(:merge_requests_closing_issues, merge_request: mr_3, issue: issue_3)
+ create(:merge_requests_closing_issues, merge_request: mr_4, issue: issue_3)
+ create(:merge_requests_closing_issues, merge_request: mr_5, issue: issue_3)
+ end
+
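+    # Freeze time so the relative timestamps set up above stay fixed while the median is calculated.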
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+    it 'calculates the median from issues with metrics' do
+ expect(stage.median).to eq(ISSUES_MEDIAN)
+ end
+ end
end
diff --git a/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb b/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb
index a785b17f682..8122e85a981 100644
--- a/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/usage_data_spec.rb
@@ -34,7 +34,7 @@ describe Gitlab::CycleAnalytics::UsageData do
expect(result).to have_key(:avg_cycle_analytics)
- CycleAnalytics::STAGES.each do |stage|
+ CycleAnalytics::Base::STAGES.each do |stage|
expect(result[:avg_cycle_analytics]).to have_key(stage)
stage_values = result[:avg_cycle_analytics][stage]
diff --git a/spec/db/importers/common_metrics_importer_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
index a717c8cd04d..57c8bafd488 100644
--- a/spec/db/importers/common_metrics_importer_spec.rb
+++ b/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
@@ -1,25 +1,8 @@
# frozen_string_literal: true
require 'rails_helper'
-require Rails.root.join("db", "importers", "common_metrics_importer.rb")
-describe Importers::PrometheusMetric do
- let(:existing_group_titles) do
- ::PrometheusMetric::GROUP_DETAILS.each_with_object({}) do |(key, value), memo|
- memo[key] = value[:group_title]
- end
- end
-
- it 'group enum equals ::PrometheusMetric' do
- expect(described_class.groups).to eq(::PrometheusMetric.groups)
- end
-
- it 'GROUP_TITLES equals ::PrometheusMetric' do
- expect(described_class::GROUP_TITLES).to eq(existing_group_titles)
- end
-end
-
-describe Importers::CommonMetricsImporter do
+describe Gitlab::DatabaseImporters::CommonMetrics::Importer do
subject { described_class.new }
context "does import common_metrics.yml" do
@@ -104,7 +87,7 @@ describe Importers::CommonMetricsImporter do
let(:query_identifier) { }
it 'raises exception' do
- expect { subject.execute }.to raise_error(described_class::MissingQueryId)
+ expect { subject.execute }.to raise_error(Gitlab::DatabaseImporters::CommonMetrics::Importer::MissingQueryId)
end
end
diff --git a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
new file mode 100644
index 00000000000..94f544e59b3
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetric do
+ it 'group enum equals ::PrometheusMetric' do
+ expect(described_class.groups).to eq(::PrometheusMetric.groups)
+ end
+
+  it '.group_titles equals the titles derived from ::PrometheusMetricEnums' do
+ existing_group_titles = ::PrometheusMetricEnums.group_details.each_with_object({}) do |(key, value), memo|
+ memo[key] = value[:group_title]
+ end
+ expect(Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetricEnums.group_titles).to eq(existing_group_titles)
+ end
+end
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index 48139c2f9dc..7833b9f387d 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -105,6 +105,7 @@ describe Gitlab::Email::Handler::CreateIssueHandler do
context "when the issue could not be saved" do
before do
allow_any_instance_of(Issue).to receive(:persisted?).and_return(false)
+ allow_any_instance_of(Issue).to receive(:ensure_metrics).and_return(nil)
end
it "raises an InvalidIssueError" do
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 25052a79916..3f0e6b34291 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -408,7 +408,7 @@ describe Gitlab::Git::Commit, :seed_helper do
context 'when oids is empty' do
it 'makes no Gitaly request' do
- expect(Gitlab::GitalyClient).not_to receive(:call)
+ expect(Gitlab::GitalyClient).not_to receive(:call).with(repository.storage, :commit_service, :list_commits_by_oid)
described_class.batch_by_oid(repository, [])
end
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
new file mode 100644
index 00000000000..e7ef9d08f80
--- /dev/null
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'json'
+require 'tempfile'
+
+describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:feature_flag_name) { 'feature-flag-name' }
+ let(:feature_flag) { Feature.get(feature_flag_name) }
+ let(:temp_gitaly_metadata_file) { create_temporary_gitaly_metadata_file }
+
+ before(:all) do
+ create_gitaly_metadata_file
+ end
+
+ subject(:wrapper) do
+ klazz = Class.new { include Gitlab::Git::RuggedImpl::UseRugged }
+ klazz.new
+ end
+
+ before do
+ Gitlab::GitalyClient.instance_variable_set(:@can_use_disk, {})
+ end
+
+ context 'when feature flag is not persisted' do
+ before do
+ allow(Feature).to receive(:persisted?).with(feature_flag).and_return(false)
+ end
+
+ it 'returns true when gitaly matches disk' do
+    pending('temporarily disabled because of https://gitlab.com/gitlab-org/gitlab-ce/issues/64338')
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ end
+
+ it 'returns false when disk access fails' do
+ allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
+
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be false
+ end
+
+ it "returns false when gitaly doesn't match disk" do
+ allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
+
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
+
+ File.delete(temp_gitaly_metadata_file)
+ end
+
+ it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
+    pending('temporarily disabled because of https://gitlab.com/gitlab-org/gitlab-ce/issues/64338')
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+
+ expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
+
+ subject.use_rugged?(repository, feature_flag_name)
+ end
+ end
+
+ context 'when feature flag is persisted' do
+ before do
+ allow(Feature).to receive(:persisted?).with(feature_flag).and_return(true)
+ end
+
+ it 'returns false when the feature flag is off' do
+ allow(feature_flag).to receive(:enabled?).and_return(false)
+
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
+ end
+
+ it "returns true when feature flag is on" do
+ allow(feature_flag).to receive(:enabled?).and_return(true)
+ allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(false)
+
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ end
+ end
+
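+  # Writes a throwaway metadata file whose filesystem id will not match the one Gitaly reports, simulating a Gitaly/disk mismatch.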
+ def create_temporary_gitaly_metadata_file
+ tmp = Tempfile.new('.gitaly-metadata')
+ gitaly_metadata = {
+ "gitaly_filesystem_id" => "some-value"
+ }
+ tmp.write(gitaly_metadata.to_json)
+ tmp.flush
+ tmp.close
+ tmp.path
+ end
+
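+  # Seeds a .gitaly-metadata file in the test storage path so the disk-access check has a filesystem id to read.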
+ def create_gitaly_metadata_file
+ File.open(File.join(SEED_STORAGE_PATH, '.gitaly-metadata'), 'w+') do |f|
+ gitaly_metadata = {
+ "gitaly_filesystem_id" => SecureRandom.uuid
+ }
+ f.write(gitaly_metadata.to_json)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/global_id_spec.rb b/spec/lib/gitlab/global_id_spec.rb
new file mode 100644
index 00000000000..d35b5da0b75
--- /dev/null
+++ b/spec/lib/gitlab/global_id_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::GlobalId do
+ describe '.build' do
+ set(:object) { create(:issue) }
+
+ it 'returns a standard GlobalId if only object is passed' do
+ expect(described_class.build(object).to_s).to eq(object.to_global_id.to_s)
+ end
+
+ it 'returns a GlobalId from params' do
+ expect(described_class.build(model_name: 'MyModel', id: 'myid').to_s).to eq(
+ 'gid://gitlab/MyModel/myid'
+ )
+ end
+
+ it 'returns a GlobalId from object and `id` param' do
+ expect(described_class.build(object, id: 'myid').to_s).to eq(
+ 'gid://gitlab/Issue/myid'
+ )
+ end
+
+ it 'returns a GlobalId from object and `model_name` param' do
+ expect(described_class.build(object, model_name: 'MyModel').to_s).to eq(
+ "gid://gitlab/MyModel/#{object.id}"
+ )
+ end
+
+ it 'returns an error if model_name and id are not able to be determined' do
+ expect { described_class.build(id: 'myid') }.to raise_error(URI::InvalidComponentError)
+ expect { described_class.build(model_name: 'MyModel') }.to raise_error(URI::InvalidComponentError)
+ expect { described_class.build }.to raise_error(URI::InvalidComponentError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 978e64c4407..97ebb5f1554 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -176,6 +176,9 @@ describe Gitlab::Kubernetes::KubeClient do
let(:rbac_client) { client.rbac_client }
[
+ :create_role,
+ :get_role,
+ :update_role,
:create_cluster_role_binding,
:get_cluster_role_binding,
:update_cluster_role_binding
diff --git a/spec/lib/gitlab/kubernetes/role_binding_spec.rb b/spec/lib/gitlab/kubernetes/role_binding_spec.rb
index 50acee254cb..4c200eb545f 100644
--- a/spec/lib/gitlab/kubernetes/role_binding_spec.rb
+++ b/spec/lib/gitlab/kubernetes/role_binding_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Gitlab::Kubernetes::RoleBinding, '#generate' do
let(:role_name) { 'edit' }
+ let(:role_kind) { 'ClusterRole' }
let(:namespace) { 'my-namespace' }
let(:service_account_name) { 'my-service-account' }
@@ -20,7 +21,7 @@ describe Gitlab::Kubernetes::RoleBinding, '#generate' do
let(:role_ref) do
{
apiGroup: 'rbac.authorization.k8s.io',
- kind: 'ClusterRole',
+ kind: role_kind,
name: role_name
}
end
@@ -37,6 +38,7 @@ describe Gitlab::Kubernetes::RoleBinding, '#generate' do
described_class.new(
name: "gitlab-#{namespace}",
role_name: role_name,
+ role_kind: role_kind,
namespace: namespace,
service_account_name: service_account_name
).generate
diff --git a/spec/lib/gitlab/kubernetes/role_spec.rb b/spec/lib/gitlab/kubernetes/role_spec.rb
new file mode 100644
index 00000000000..3a5cd3b6704
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/role_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::Role do
+ let(:role) { described_class.new(name: name, namespace: namespace, rules: rules) }
+ let(:name) { 'example-name' }
+ let(:namespace) { 'example-namespace' }
+
+ let(:rules) do
+ [{
+ apiGroups: %w(hello.world),
+ resources: %w(oil diamonds coffee),
+ verbs: %w(say do walk run)
+ }]
+ end
+
+ describe '#generate' do
+ subject { role.generate }
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ metadata: { name: name, namespace: namespace },
+ rules: rules
+ )
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+end
diff --git a/spec/lib/gitlab/lets_encrypt_spec.rb b/spec/lib/gitlab/lets_encrypt_spec.rb
index 674b114e9d3..65aea0937f1 100644
--- a/spec/lib/gitlab/lets_encrypt_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt_spec.rb
@@ -10,21 +10,10 @@ describe ::Gitlab::LetsEncrypt do
end
describe '.enabled?' do
- let(:project) { create(:project) }
- let(:pages_domain) { create(:pages_domain, project: project) }
-
- subject { described_class.enabled?(pages_domain) }
+ subject { described_class.enabled? }
context 'when terms of service are accepted' do
it { is_expected.to eq(true) }
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(pages_auto_ssl: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
context 'when terms of service are not accepted' do
@@ -34,23 +23,5 @@ describe ::Gitlab::LetsEncrypt do
it { is_expected.to eq(false) }
end
-
- context 'when feature flag for project is disabled' do
- before do
- stub_feature_flags(pages_auto_ssl_for_project: false)
- end
-
- it 'returns false' do
- is_expected.to eq(false)
- end
- end
-
- context 'when domain has not project' do
- let(:pages_domain) { create(:pages_domain) }
-
- it 'returns false' do
- is_expected.to eq(false)
- end
- end
end
end
diff --git a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
index 18052b1991c..c5fc74afea5 100644
--- a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
@@ -9,12 +9,13 @@ describe Gitlab::MarkdownCache::ActiveRecord::Extension do
cache_markdown_field :title, whitelisted: true
cache_markdown_field :description, pipeline: :single_line
- attr_accessor :author, :project
+ attribute :author
+ attribute :project
end
end
let(:cache_version) { Gitlab::MarkdownCache::CACHE_COMMONMARK_VERSION << 16 }
- let(:thing) { klass.new(title: markdown, title_html: html, cached_markdown_version: cache_version) }
+ let(:thing) { klass.create(title: markdown, title_html: html, cached_markdown_version: cache_version) }
let(:markdown) { '`Foo`' }
let(:html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Foo</code></p>' }
@@ -37,7 +38,7 @@ describe Gitlab::MarkdownCache::ActiveRecord::Extension do
end
context 'a changed markdown field' do
- let(:thing) { klass.new(title: markdown, title_html: html, cached_markdown_version: cache_version) }
+ let(:thing) { klass.create(title: markdown, title_html: html, cached_markdown_version: cache_version) }
before do
thing.title = updated_markdown
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
new file mode 100644
index 00000000000..34bc6359414
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Metrics::Dashboard::Url do
+ describe '#regex' do
+ it 'returns a regular expression' do
+ expect(described_class.regex).to be_a Regexp
+ end
+
+ it 'matches a metrics dashboard link with named params' do
+ url = Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url('foo', 'bar', 1, start: 123345456, anchor: 'title')
+
+ expected_params = {
+ 'url' => url,
+ 'namespace' => 'foo',
+ 'project' => 'bar',
+ 'environment' => '1',
+ 'query' => '?start=123345456',
+ 'anchor' => '#title'
+ }
+
+ expect(described_class.regex).to match url
+
+ described_class.regex.match(url) do |m|
+ expect(m.named_captures).to eq expected_params
+ end
+ end
+
+ it 'does not match other gitlab urls that contain the term metrics' do
+ url = Gitlab::Routing.url_helpers.active_common_namespace_project_prometheus_metrics_url('foo', 'bar', :json)
+
+ expect(described_class.regex).not_to match url
+ end
+
+ it 'does not match other gitlab urls' do
+ url = Gitlab.config.gitlab.url
+
+ expect(described_class.regex).not_to match url
+ end
+
+ it 'does not match non-gitlab urls' do
+ url = 'https://www.super_awesome_site.com/'
+
+ expect(described_class.regex).not_to match url
+ end
+ end
+
+ describe '#build_dashboard_url' do
+ it 'builds the url for the dashboard endpoint' do
+ url = described_class.build_dashboard_url('foo', 'bar', 1)
+
+ expect(url).to match described_class.regex
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
index 090e456644f..4b697b2ba0f 100644
--- a/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
@@ -4,7 +4,7 @@ describe Gitlab::Metrics::Samplers::UnicornSampler do
subject { described_class.new(1.second) }
describe '#sample' do
- let(:unicorn) { double('unicorn') }
+ let(:unicorn) { Module.new }
let(:raindrops) { double('raindrops') }
let(:stats) { double('stats') }
@@ -78,19 +78,32 @@ describe Gitlab::Metrics::Samplers::UnicornSampler do
end
end
- context 'additional metrics' do
- let(:unicorn_workers) { 2 }
-
+ context 'unicorn workers' do
before do
- allow(unicorn).to receive(:listener_names).and_return([""])
- allow(::Gitlab::Metrics::System).to receive(:cpu_time).and_return(3.14)
- allow(subject).to receive(:unicorn_workers_count).and_return(unicorn_workers)
+ allow(unicorn).to receive(:listener_names).and_return([])
end
- it "sets additional metrics" do
- expect(subject.metrics[:unicorn_workers]).to receive(:set).with({}, unicorn_workers)
+ context 'without http server' do
+ it "does set unicorn_workers to 0" do
+ expect(subject.metrics[:unicorn_workers]).to receive(:set).with({}, 0)
- subject.sample
+ subject.sample
+ end
+ end
+
+ context 'with http server' do
+ let(:http_server_class) { Struct.new(:worker_processes) }
+ let!(:http_server) { http_server_class.new(5) }
+
+ before do
+ stub_const('Unicorn::HttpServer', http_server_class)
+ end
+
+ it "sets additional metrics" do
+ expect(subject.metrics[:unicorn_workers]).to receive(:set).with({}, 5)
+
+ subject.sample
+ end
end
end
end
diff --git a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
index 52c7a02219f..b6629fad453 100644
--- a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
@@ -28,6 +30,21 @@ describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
expect(ttl).to be > 10.seconds
end
+
+ it 'sets the object in redis once if a block was given and nothing was cached' do
+ issue = create(:issue, project: project)
+
+ expect(map.get_gitlab_model('does not exist') { issue }).to eq(issue)
+
+ expect { |b| map.get_gitlab_model('does not exist', &b) }
+ .not_to yield_control
+ end
+
+ it 'does not cache `nil` objects' do
+ expect(map).not_to receive(:set_gitlab_model)
+
+ map.get_gitlab_model('does not exist') { nil }
+ end
end
describe '#set_gitlab_model' do
diff --git a/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
new file mode 100644
index 00000000000..e88eec2c393
--- /dev/null
+++ b/spec/lib/gitlab/phabricator_import/conduit/user_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Gitlab::PhabricatorImport::Conduit::User do
+ let(:user_client) do
+ described_class.new(phabricator_url: 'https://see-ya-later.phabricator', api_token: 'api-token')
+ end
+
+ describe '#users' do
+ let(:fake_client) { double('Phabricator client') }
+
+ before do
+ allow(user_client).to receive(:client).and_return(fake_client)
+ end
+
+ it 'calls the api with the correct params' do
+ expected_params = {
+ constraints: { phids: ['phid-1', 'phid-2'] }
+ }
+
+ expect(fake_client).to receive(:get).with('user.search',
+ params: expected_params)
+
+ user_client.users(['phid-1', 'phid-2'])
+ end
+
+ it 'returns an array of parsed responses' do
+ response = Gitlab::PhabricatorImport::Conduit::Response
+ .new(fixture_file('phabricator_responses/user.search.json'))
+
+ allow(fake_client).to receive(:get).and_return(response)
+
+ expect(user_client.users(%w[some phids])).to match_array([an_instance_of(Gitlab::PhabricatorImport::Conduit::UsersResponse)])
+ end
+
+ it 'performs multiple requests if more phids than the maximum page size are passed' do
+ stub_const('Gitlab::PhabricatorImport::Conduit::User::MAX_PAGE_SIZE', 1)
+ first_params = { constraints: { phids: ['phid-1'] } }
+ second_params = { constraints: { phids: ['phid-2'] } }
+
+ expect(fake_client).to receive(:get).with('user.search',
+ params: first_params).once
+ expect(fake_client).to receive(:get).with('user.search',
+ params: second_params).once
+
+ user_client.users(['phid-1', 'phid-2'])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb
new file mode 100644
index 00000000000..00778ad90fd
--- /dev/null
+++ b/spec/lib/gitlab/phabricator_import/conduit/users_response_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Gitlab::PhabricatorImport::Conduit::UsersResponse do
+ let(:conduit_response) do
+ Gitlab::PhabricatorImport::Conduit::Response
+ .new(JSON.parse(fixture_file('phabricator_responses/user.search.json')))
+ end
+
+ subject(:response) { described_class.new(conduit_response) }
+
+ describe '#users' do
+ it 'builds the correct users representation' do
+ tasks = response.users
+
+ usernames = tasks.map(&:username)
+
+ expect(usernames).to contain_exactly('jane', 'john')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb
index 2412cf76f79..667321409da 100644
--- a/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/issues/importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
describe Gitlab::PhabricatorImport::Issues::Importer do
- set(:project) { create(:project) }
+ let(:project) { create(:project) }
let(:response) do
Gitlab::PhabricatorImport::Conduit::TasksResponse.new(
@@ -15,7 +15,6 @@ describe Gitlab::PhabricatorImport::Issues::Importer do
before do
client = instance_double(Gitlab::PhabricatorImport::Conduit::Maniphest)
-
allow(client).to receive(:tasks).and_return(response)
allow(importer).to receive(:client).and_return(client)
end
@@ -34,20 +33,29 @@ describe Gitlab::PhabricatorImport::Issues::Importer do
importer.execute
end
- it 'schedules the next batch if there is one' do
- expect(Gitlab::PhabricatorImport::ImportTasksWorker)
- .to receive(:schedule).with(project.id, response.pagination.next_page)
+ context 'stubbed task import' do
+ before do
+        # Stub out the actual importing so we don't perform additional requests
+ expect_next_instance_of(Gitlab::PhabricatorImport::Issues::TaskImporter) do |task_importer|
+ allow(task_importer).to receive(:execute)
+ end.at_least(1)
+ end
- importer.execute
- end
+ it 'schedules the next batch if there is one' do
+ expect(Gitlab::PhabricatorImport::ImportTasksWorker)
+ .to receive(:schedule).with(project.id, response.pagination.next_page)
- it 'does not reschedule when there is no next page' do
- allow(response.pagination).to receive(:has_next_page?).and_return(false)
+ importer.execute
+ end
- expect(Gitlab::PhabricatorImport::ImportTasksWorker)
- .not_to receive(:schedule)
+ it 'does not reschedule when there is no next page' do
+ allow(response.pagination).to receive(:has_next_page?).and_return(false)
- importer.execute
+ expect(Gitlab::PhabricatorImport::ImportTasksWorker)
+ .not_to receive(:schedule)
+
+ importer.execute
+ end
end
end
end
diff --git a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
index 1625604e754..06ed264e781 100644
--- a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
@@ -12,6 +12,8 @@ describe Gitlab::PhabricatorImport::Issues::TaskImporter do
'description' => {
'raw' => '# This is markdown\n it can contain more text.'
},
+ 'authorPHID' => 'PHID-USER-456',
+ 'ownerPHID' => 'PHID-USER-123',
'dateCreated' => '1518688921',
'dateClosed' => '1518789995'
}
@@ -19,9 +21,18 @@ describe Gitlab::PhabricatorImport::Issues::TaskImporter do
)
end
+ subject(:importer) { described_class.new(project, task) }
+
describe '#execute' do
+ let(:fake_user_finder) { instance_double(Gitlab::PhabricatorImport::UserFinder) }
+
+ before do
+ allow(fake_user_finder).to receive(:find)
+ allow(importer).to receive(:user_finder).and_return(fake_user_finder)
+ end
+
it 'creates the issue with the expected attributes' do
- issue = described_class.new(project, task).execute
+ issue = importer.execute
expect(issue.project).to eq(project)
expect(issue).to be_persisted
@@ -34,21 +45,38 @@ describe Gitlab::PhabricatorImport::Issues::TaskImporter do
end
it 'does not recreate the issue when called multiple times' do
- expect { described_class.new(project, task).execute }
+ expect { importer.execute }
.to change { project.issues.reload.size }.from(0).to(1)
- expect { described_class.new(project, task).execute }
+ expect { importer.execute }
.not_to change { project.issues.reload.size }
end
it 'does not trigger a save when the object did not change' do
existing_issue = create(:issue,
task.issue_attributes.merge(author: User.ghost))
- importer = described_class.new(project, task)
allow(importer).to receive(:issue).and_return(existing_issue)
expect(existing_issue).not_to receive(:save!)
importer.execute
end
+
+ it 'links the author if the author can be found' do
+ author = create(:user)
+ expect(fake_user_finder).to receive(:find).with('PHID-USER-456').and_return(author)
+
+ issue = importer.execute
+
+ expect(issue.author).to eq(author)
+ end
+
+ it 'links an assignee if the user can be found' do
+ assignee = create(:user)
+ expect(fake_user_finder).to receive(:find).with('PHID-USER-123').and_return(assignee)
+
+ issue = importer.execute
+
+ expect(issue.assignees).to include(assignee)
+ end
end
end
diff --git a/spec/lib/gitlab/phabricator_import/representation/task_spec.rb b/spec/lib/gitlab/phabricator_import/representation/task_spec.rb
index dfbd8c546eb..5603a6961d6 100644
--- a/spec/lib/gitlab/phabricator_import/representation/task_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/representation/task_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::PhabricatorImport::Representation::Task do
@@ -7,6 +9,8 @@ describe Gitlab::PhabricatorImport::Representation::Task do
'phid' => 'the-phid',
'fields' => {
'name' => 'Title'.ljust(257, '.'), # A string padded to 257 chars
+ 'authorPHID' => 'a phid',
+ 'ownerPHID' => 'another user phid',
'description' => {
'raw' => '# This is markdown\n it can contain more text.'
},
@@ -30,4 +34,16 @@ describe Gitlab::PhabricatorImport::Representation::Task do
expect(task.issue_attributes).to eq(expected_attributes)
end
end
+
+ describe '#author_phid' do
+ it 'returns the correct field' do
+ expect(task.author_phid).to eq('a phid')
+ end
+ end
+
+ describe '#owner_phid' do
+ it 'returns the correct field' do
+ expect(task.owner_phid).to eq('another user phid')
+ end
+ end
end
diff --git a/spec/lib/gitlab/phabricator_import/representation/user_spec.rb b/spec/lib/gitlab/phabricator_import/representation/user_spec.rb
new file mode 100644
index 00000000000..f52467a0cf1
--- /dev/null
+++ b/spec/lib/gitlab/phabricator_import/representation/user_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::PhabricatorImport::Representation::User do
+ subject(:user) do
+ described_class.new(
+ {
+ 'phid' => 'the-phid',
+ 'fields' => {
+ 'username' => 'the-username'
+ }
+ }
+ )
+ end
+
+ describe '#phabricator_id' do
+ it 'returns the phabricator id' do
+ expect(user.phabricator_id).to eq('the-phid')
+ end
+ end
+
+ describe '#username' do
+ it 'returns the username' do
+ expect(user.username).to eq('the-username')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
new file mode 100644
index 00000000000..096321cda5f
--- /dev/null
+++ b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe Gitlab::PhabricatorImport::UserFinder, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, namespace: create(:group)) }
+ subject(:finder) { described_class.new(project, ['first-phid', 'second-phid']) }
+
+ before do
+ project.namespace.add_developer(existing_user)
+ end
+
+ describe '#find' do
+ let!(:existing_user) { create(:user, username: 'existing-user') }
+ let(:cache) { Gitlab::PhabricatorImport::Cache::Map.new(project) }
+
+ before do
+ allow(finder).to receive(:object_map).and_return(cache)
+ end
+
+ context 'for a cached phid' do
+ before do
+ cache.set_gitlab_model(existing_user, 'first-phid')
+ end
+
+ it 'returns the existing user' do
+ expect(finder.find('first-phid')).to eq(existing_user)
+ end
+
+ it 'does not perform a find using the API' do
+ expect(finder).not_to receive(:find_user_for_phid)
+
+ finder.find('first-phid')
+ end
+
+ it 'excludes the phid from the request if one needs to be made' do
+ client = instance_double(Gitlab::PhabricatorImport::Conduit::User)
+ allow(finder).to receive(:client).and_return(client)
+
+ expect(client).to receive(:users).with(['second-phid']).and_return([])
+
+ finder.find('first-phid')
+ finder.find('second-phid')
+ end
+ end
+
+ context 'when the phid is not cached' do
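+      # Two fake pages of user.search results; only 'existing-user' is a member of the project's group.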
+ let(:response) do
+ [
+ instance_double(
+ Gitlab::PhabricatorImport::Conduit::UsersResponse,
+ users: [instance_double(Gitlab::PhabricatorImport::Representation::User, phabricator_id: 'second-phid', username: 'existing-user')]
+ ),
+ instance_double(
+ Gitlab::PhabricatorImport::Conduit::UsersResponse,
+ users: [instance_double(Gitlab::PhabricatorImport::Representation::User, phabricator_id: 'first-phid', username: 'other-user')]
+ )
+ ]
+ end
+ let(:client) do
+ client = instance_double(Gitlab::PhabricatorImport::Conduit::User)
+ allow(client).to receive(:users).and_return(response)
+
+ client
+ end
+
+ before do
+ allow(finder).to receive(:client).and_return(client)
+ end
+
+ it 'loads the users from the API once' do
+ expect(client).to receive(:users).and_return(response).once
+
+ expect(finder.find('second-phid')).to eq(existing_user)
+ expect(finder.find('first-phid')).to be_nil
+ end
+
+ it 'adds found users to the cache' do
+ expect { finder.find('second-phid') }
+ .to change { cache.get_gitlab_model('second-phid') }
+ .from(nil).to(existing_user)
+ end
+
+ it 'only returns users that are members of the project' do
+ create(:user, username: 'other-user')
+
+ expect(finder.find('first-phid')).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index 5b5052de372..98838712eae 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -10,6 +10,12 @@ describe Gitlab::SQL::Pattern do
it 'returns exact matching pattern' do
expect(to_pattern).to eq('12')
end
+
+      context 'and use_minimum_char_limit is false' do
+ it 'returns partial matching pattern' do
+ expect(User.to_pattern(query, use_minimum_char_limit: false)).to eq('%12%')
+ end
+ end
end
context 'when a query with a escape character is shorter than 3 chars' do
@@ -18,6 +24,12 @@ describe Gitlab::SQL::Pattern do
it 'returns sanitized exact matching pattern' do
expect(to_pattern).to eq('\_2')
end
+
+      context 'and use_minimum_char_limit is false' do
+ it 'returns sanitized partial matching pattern' do
+ expect(User.to_pattern(query, use_minimum_char_limit: false)).to eq('%\_2%')
+ end
+ end
end
context 'when a query is equal to 3 chars' do
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 253366e0789..f8b0cbfb6f6 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -353,7 +353,7 @@ describe Gitlab::UrlBlocker do
end
end
- describe '#validate_hostname!' do
+ describe '#validate_hostname' do
let(:ip_addresses) do
[
'2001:db8:1f70::999:de8:7648:6e8',
@@ -378,7 +378,7 @@ describe Gitlab::UrlBlocker do
it 'does not raise error for valid Ip addresses' do
ip_addresses.each do |ip|
- expect { described_class.send(:validate_hostname!, ip) }.not_to raise_error
+ expect { described_class.send(:validate_hostname, ip) }.not_to raise_error
end
end
end
diff --git a/spec/lib/peek/views/redis_detailed_spec.rb b/spec/lib/peek/views/redis_detailed_spec.rb
new file mode 100644
index 00000000000..da13b6df53b
--- /dev/null
+++ b/spec/lib/peek/views/redis_detailed_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Peek::Views::RedisDetailed do
+ let(:redis_detailed_class) do
+ Class.new do
+ include Peek::Views::RedisDetailed
+ end
+ end
+
+ subject { redis_detailed_class.new }
+
+ using RSpec::Parameterized::TableSyntax
+
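+  # Each row pairs the raw Redis command with the redacted form expected in the Peek details.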
+ where(:cmd, :expected) do
+ [:auth, 'test'] | 'auth <redacted>'
+ [:set, 'key', 'value'] | 'set key <redacted>'
+ [:set, 'bad'] | 'set bad'
+ [:hmset, 'key1', 'value1', 'key2', 'value2'] | 'hmset key1 <redacted>'
+ [:get, 'key'] | 'get key'
+ end
+
+ with_them do
+ it 'scrubs Redis commands', :request_store do
+ subject.detail_store << { cmd: cmd, duration: 1.second }
+
+ expect(subject.details.count).to eq(1)
+ expect(subject.details.first)
+ .to eq({
+ cmd: expected,
+ duration: 1000
+ })
+ end
+ end
+end
diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb
index fbf5d387d0e..bc246f88685 100644
--- a/spec/migrations/active_record/schema_spec.rb
+++ b/spec/migrations/active_record/schema_spec.rb
@@ -15,7 +15,7 @@ describe ActiveRecord::Schema do
it '> schema version equals last migration timestamp' do
defined_schema_version = File.open(Rails.root.join('db', 'schema.rb')) do |file|
file.find { |line| line =~ /ActiveRecord::Schema.define/ }
- end.match(/(\d+)/)[0].to_i
+ end.match(/(\d{4}_\d{2}_\d{2}_\d{6})/)[0].to_i
expect(defined_schema_version).to eq(latest_migration_timestamp)
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index d98db024f73..78862de0657 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -2013,6 +2013,7 @@ describe Ci::Build do
{ key: 'CI', value: 'true', public: true, masked: false },
{ key: 'GITLAB_CI', value: 'true', public: true, masked: false },
{ key: 'GITLAB_FEATURES', value: project.licensed_features.join(','), public: true, masked: false },
+ { key: 'CI_SERVER_HOST', value: Gitlab.config.gitlab.host, public: true, masked: false },
{ key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false },
{ key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false },
{ key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true, masked: false },
diff --git a/spec/models/clusters/clusters_hierarchy_spec.rb b/spec/models/clusters/clusters_hierarchy_spec.rb
new file mode 100644
index 00000000000..0470ebe17ea
--- /dev/null
+++ b/spec/models/clusters/clusters_hierarchy_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::ClustersHierarchy do
+ describe '#base_and_ancestors' do
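+    # Returns the clusters visible to the given project or group, ordered from the base upwards through its ancestor groups.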
+ def base_and_ancestors(clusterable)
+ described_class.new(clusterable).base_and_ancestors
+ end
+
+ context 'project in nested group with clusters at every level' do
+ let!(:cluster) { create(:cluster, :project, projects: [project]) }
+ let!(:child) { create(:cluster, :group, groups: [child_group]) }
+ let!(:parent) { create(:cluster, :group, groups: [parent_group]) }
+ let!(:ancestor) { create(:cluster, :group, groups: [ancestor_group]) }
+
+ let(:ancestor_group) { create(:group) }
+ let(:parent_group) { create(:group, parent: ancestor_group) }
+ let(:child_group) { create(:group, parent: parent_group) }
+ let(:project) { create(:project, group: child_group) }
+
+ it 'returns clusters for project' do
+ expect(base_and_ancestors(project)).to eq([cluster, child, parent, ancestor])
+ end
+
+ it 'returns clusters for child_group' do
+ expect(base_and_ancestors(child_group)).to eq([child, parent, ancestor])
+ end
+
+ it 'returns clusters for parent_group' do
+ expect(base_and_ancestors(parent_group)).to eq([parent, ancestor])
+ end
+
+ it 'returns clusters for ancestor_group' do
+ expect(base_and_ancestors(ancestor_group)).to eq([ancestor])
+ end
+ end
+
+ context 'project in a namespace' do
+ let!(:cluster) { create(:cluster, :project) }
+
+ it 'returns clusters for project' do
+ expect(base_and_ancestors(cluster.project)).to eq([cluster])
+ end
+ end
+
+ context 'project in nested group with clusters at some levels' do
+ let!(:child) { create(:cluster, :group, groups: [child_group]) }
+ let!(:ancestor) { create(:cluster, :group, groups: [ancestor_group]) }
+
+ let(:ancestor_group) { create(:group) }
+ let(:parent_group) { create(:group, parent: ancestor_group) }
+ let(:child_group) { create(:group, parent: parent_group) }
+ let(:project) { create(:project, group: child_group) }
+
+ it 'returns clusters for project' do
+ expect(base_and_ancestors(project)).to eq([child, ancestor])
+ end
+
+ it 'returns clusters for child_group' do
+ expect(base_and_ancestors(child_group)).to eq([child, ancestor])
+ end
+
+ it 'returns clusters for parent_group' do
+ expect(base_and_ancestors(parent_group)).to eq([ancestor])
+ end
+
+ it 'returns clusters for ancestor_group' do
+ expect(base_and_ancestors(ancestor_group)).to eq([ancestor])
+ end
+ end
+ end
+end
diff --git a/spec/models/commit_range_spec.rb b/spec/models/commit_range_spec.rb
index b96ca89c893..4a524b585e1 100644
--- a/spec/models/commit_range_spec.rb
+++ b/spec/models/commit_range_spec.rb
@@ -139,8 +139,8 @@ describe CommitRange do
end
describe '#has_been_reverted?' do
- let(:issue) { create(:issue) }
- let(:user) { issue.author }
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue, author: user, project: project) }
it 'returns true if the commit has been reverted' do
create(:note_on_issue,
@@ -149,9 +149,11 @@ describe CommitRange do
note: commit1.revert_description(user),
project: issue.project)
- expect_any_instance_of(Commit).to receive(:reverts_commit?)
- .with(commit1, user)
- .and_return(true)
+ expect_next_instance_of(Commit) do |commit|
+ expect(commit).to receive(:reverts_commit?)
+ .with(commit1, user)
+ .and_return(true)
+ end
expect(commit1.has_been_reverted?(user, issue.notes_with_associations)).to eq(true)
end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 0e5fb2b5153..9a12c3d6965 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -198,6 +198,36 @@ describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
end
end
+
+ describe '#updated_cached_html_for' do
+ let(:thing) { klass.new(description: markdown, description_html: html, cached_markdown_version: cache_version) }
+
+ context 'when the markdown cache is outdated' do
+ before do
+ thing.cached_markdown_version += 1
+ end
+
+ it 'calls #refresh_markdown_cache' do
+ expect(thing).to receive(:refresh_markdown_cache)
+
+ expect(thing.updated_cached_html_for(:description)).to eq(html)
+ end
+ end
+
+ context 'when the markdown field does not exist' do
+ it 'returns nil' do
+ expect(thing.updated_cached_html_for(:something)).to eq(nil)
+ end
+ end
+
+ context 'when the markdown cache is up to date' do
+ it 'does not call #refresh_markdown_cache' do
+ expect(thing).not_to receive(:refresh_markdown_cache)
+
+ expect(thing.updated_cached_html_for(:description)).to eq(html)
+ end
+ end
+ end
end
context 'for Active record classes' do
diff --git a/spec/models/concerns/deployment_platform_spec.rb b/spec/models/concerns/deployment_platform_spec.rb
index 2378f400540..e2fc8a5d127 100644
--- a/spec/models/concerns/deployment_platform_spec.rb
+++ b/spec/models/concerns/deployment_platform_spec.rb
@@ -5,7 +5,7 @@ require 'rails_helper'
describe DeploymentPlatform do
let(:project) { create(:project) }
- describe '#deployment_platform' do
+ shared_examples '#deployment_platform' do
subject { project.deployment_platform }
context 'with no Kubernetes configuration on CI/CD, no Kubernetes Service' do
@@ -84,4 +84,20 @@ describe DeploymentPlatform do
end
end
end
+
+ context 'legacy implementation' do
+ before do
+ stub_feature_flags(clusters_cte: false)
+ end
+
+ include_examples '#deployment_platform'
+ end
+
+ context 'CTE implementation' do
+ before do
+ stub_feature_flags(clusters_cte: true)
+ end
+
+ include_examples '#deployment_platform'
+ end
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 64f02978d79..68224a56515 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -223,6 +223,16 @@ describe Issuable do
expect(issuable_class.full_search(searchable_issue2.description.downcase)).to eq([searchable_issue2])
end
+ it 'returns issues with a fuzzy matching description for a query shorter than 3 chars if told to do so' do
+ search = searchable_issue2.description.downcase.scan(/\w+/).sample[-1]
+
+ expect(issuable_class.full_search(search, use_minimum_char_limit: false)).to include(searchable_issue2)
+ end
+
+ it 'returns issues with a fuzzy matching title for a query shorter than 3 chars if told to do so' do
+ expect(issuable_class.full_search('i', use_minimum_char_limit: false)).to include(searchable_issue)
+ end
+
context 'when matching columns is "title"' do
it 'returns issues with a matching title' do
expect(issuable_class.full_search(searchable_issue.title, matched_columns: 'title'))
diff --git a/spec/models/concerns/project_api_compatibility_spec.rb b/spec/models/concerns/project_api_compatibility_spec.rb
new file mode 100644
index 00000000000..8cecd4fe7bc
--- /dev/null
+++ b/spec/models/concerns/project_api_compatibility_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ProjectAPICompatibility do
+ let(:project) { create(:project) }
+
+ # git_strategy
+ it "converts build_git_strategy=fetch to build_allow_git_fetch=true" do
+ project.update!(build_git_strategy: 'fetch')
+ expect(project.build_allow_git_fetch).to eq(true)
+ end
+
+ it "converts build_git_strategy=clone to build_allow_git_fetch=false" do
+ project.update!(build_git_strategy: 'clone')
+ expect(project.build_allow_git_fetch).to eq(false)
+ end
+
+ # auto_devops_enabled
+ it "converts auto_devops_enabled=false to auto_devops_enabled?=false" do
+ expect(project.auto_devops_enabled?).to eq(true)
+ project.update!(auto_devops_enabled: false)
+ expect(project.auto_devops_enabled?).to eq(false)
+ end
+
+ it "converts auto_devops_enabled=true to auto_devops_enabled?=true" do
+ expect(project.auto_devops_enabled?).to eq(true)
+ project.update!(auto_devops_enabled: true)
+ expect(project.auto_devops_enabled?).to eq(true)
+ end
+
+ # auto_devops_deploy_strategy
+ it "converts auto_devops_deploy_strategy=timed_incremental to auto_devops.deploy_strategy=timed_incremental" do
+ expect(project.auto_devops).to be_nil
+ project.update!(auto_devops_deploy_strategy: 'timed_incremental')
+ expect(project.auto_devops.deploy_strategy).to eq('timed_incremental')
+ end
+end
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index 5aa43b58217..1fe176ab5af 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
- let(:features) { %w(issues wiki builds merge_requests snippets) }
+ let(:features_except_repository) { %w(issues wiki builds merge_requests snippets) }
+ let(:features) { features_except_repository + ['repository'] }
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
@@ -12,30 +13,37 @@ describe ProjectFeaturesCompatibility do
# So we can keep it compatible
it "converts fields from 'true' to ProjectFeature::ENABLED" do
- features.each do |feature|
+ features_except_repository.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, "true")
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::ENABLED)
end
end
it "converts fields from 'false' to ProjectFeature::DISABLED" do
- features.each do |feature|
+ features_except_repository.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, "false")
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::DISABLED)
end
end
it "converts fields from true to ProjectFeature::ENABLED" do
- features.each do |feature|
+ features_except_repository.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, true)
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::ENABLED)
end
end
it "converts fields from false to ProjectFeature::DISABLED" do
- features.each do |feature|
+ features_except_repository.each do |feature|
project.update_attribute("#{feature}_enabled".to_sym, false)
expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::DISABLED)
end
end
+
+ it "accepts private as ProjectFeature::PRIVATE" do
+ features.each do |feature|
+ project.update!("#{feature}_access_level".to_sym => 'private')
+ expect(project.project_feature.public_send("#{feature}_access_level")).to eq(ProjectFeature::PRIVATE)
+ end
+ end
end
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index 7faa196623f..3d026932f59 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -47,30 +47,12 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
subject(:go!) { instance.result }
- context 'when cache is empty' do
- it { is_expected.to be_nil }
-
- it 'enqueues a background worker to bootstrap the cache' do
- expect(ReactiveCachingWorker).to receive(:perform_async).with(CacheTest, 666)
-
- go!
- end
-
- it 'updates the cache lifespan' do
- expect(reactive_cache_alive?(instance)).to be_falsy
-
- go!
-
- expect(reactive_cache_alive?(instance)).to be_truthy
- end
- end
-
- context 'when the cache is full' do
+ shared_examples 'a cacheable value' do |cached_value|
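+      # Whatever value is already cached (even false or nil) should be returned as-is without enqueuing a refresh.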
before do
- stub_reactive_cache(instance, 4)
+ stub_reactive_cache(instance, cached_value)
end
- it { is_expected.to eq(4) }
+ it { is_expected.to eq(cached_value) }
it 'does not enqueue a background worker' do
expect(ReactiveCachingWorker).not_to receive(:perform_async)
@@ -90,9 +72,7 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
end
it { is_expected.to be_nil }
- end
- context 'when cache was invalidated' do
it 'refreshes cache' do
expect(ReactiveCachingWorker).to receive(:perform_async).with(CacheTest, 666)
@@ -101,12 +81,34 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
end
end
- context 'when cache contains non-nil but blank value' do
- before do
- stub_reactive_cache(instance, false)
+ context 'when cache is empty' do
+ it { is_expected.to be_nil }
+
+ it 'enqueues a background worker to bootstrap the cache' do
+ expect(ReactiveCachingWorker).to receive(:perform_async).with(CacheTest, 666)
+
+ go!
end
- it { is_expected.to eq(false) }
+ it 'updates the cache lifespan' do
+ expect(reactive_cache_alive?(instance)).to be_falsy
+
+ go!
+
+ expect(reactive_cache_alive?(instance)).to be_truthy
+ end
+ end
+
+ context 'when the cache is full' do
+ it_behaves_like 'a cacheable value', 4
+ end
+
+ context 'when the cache contains non-nil but blank value' do
+ it_behaves_like 'a cacheable value', false
+ end
+
+ context 'when the cache contains nil value' do
+ it_behaves_like 'a cacheable value', nil
end
end
@@ -206,8 +208,9 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
expect(read_reactive_cache(instance)).to eq("preexisting")
end
- it 'enqueues a repeat worker' do
- expect_reactive_cache_update_queued(instance)
+ it 'does not enqueue a repeat worker' do
+ expect(ReactiveCachingWorker)
+ .not_to receive(:perform_in)
expect { go! }.to raise_error("foo")
end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 1fb0dd5030c..31163a5bb5c 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -15,23 +15,46 @@ describe Group, 'Routable' do
end
describe 'Callbacks' do
- it 'creates route record on create' do
- expect(group.route.path).to eq(group.path)
- expect(group.route.name).to eq(group.name)
- end
+ context 'for a group' do
+ it 'creates route record on create' do
+ expect(group.route.path).to eq(group.path)
+ expect(group.route.name).to eq(group.name)
+ end
+
+ it 'updates route record on path change' do
+ group.update(path: 'wow', name: 'much')
- it 'updates route record on path change' do
- group.update(path: 'wow', name: 'much')
+ expect(group.route.path).to eq('wow')
+ expect(group.route.name).to eq('much')
+ end
+
+ it 'ensure route path uniqueness across different objects' do
+ create(:group, parent: group, path: 'xyz')
+ duplicate = build(:project, namespace: group, path: 'xyz')
- expect(group.route.path).to eq('wow')
- expect(group.route.name).to eq('much')
+ expect { duplicate.save! }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Path has already been taken')
+ end
end
- it 'ensure route path uniqueness across different objects' do
- create(:group, parent: group, path: 'xyz')
- duplicate = build(:project, namespace: group, path: 'xyz')
+ context 'for a user' do
+ let(:user) { create(:user, username: 'jane', name: "Jane Doe") }
- expect { duplicate.save! }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Path has already been taken')
+ it 'creates the route for a record on create' do
+ expect(user.namespace.name).to eq('Jane Doe')
+ expect(user.namespace.path).to eq('jane')
+ end
+
+ it 'updates routes and nested routes on name change' do
+ project = create(:project, path: 'work-stuff', name: 'Work stuff', namespace: user.namespace)
+
+ user.update!(username: 'jaen', name: 'Jaen Did')
+ project.reload
+
+ expect(user.namespace.name).to eq('Jaen Did')
+ expect(user.namespace.path).to eq('jaen')
+ expect(project.full_name).to eq('Jaen Did / Work stuff')
+ expect(project.full_path).to eq('jaen/work-stuff')
+ end
end
end
diff --git a/spec/models/cycle_analytics/code_spec.rb b/spec/models/cycle_analytics/code_spec.rb
index b22a0340015..db6e70973ae 100644
--- a/spec/models/cycle_analytics/code_spec.rb
+++ b/spec/models/cycle_analytics/code_spec.rb
@@ -8,7 +8,8 @@ describe 'CycleAnalytics#code' do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
context 'with deployment' do
generate_cycle_analytics_spec(
diff --git a/spec/models/cycle_analytics/issue_spec.rb b/spec/models/cycle_analytics/issue_spec.rb
index 07d60be091a..4ccbdf29df6 100644
--- a/spec/models/cycle_analytics/issue_spec.rb
+++ b/spec/models/cycle_analytics/issue_spec.rb
@@ -8,7 +8,8 @@ describe 'CycleAnalytics#issue' do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
generate_cycle_analytics_spec(
phase: :issue,
@@ -23,7 +24,7 @@ describe 'CycleAnalytics#issue' do
["list label added to issue",
-> (context, data) do
if data[:issue].persisted?
- data[:issue].update(label_ids: [context.create(:label, lists: [context.create(:list)]).id])
+ data[:issue].update(label_ids: [context.create(:list).label_id])
end
end]],
post_fn: -> (context, data) do
diff --git a/spec/models/cycle_analytics/plan_spec.rb b/spec/models/cycle_analytics/plan_spec.rb
index 3d22a284264..c99c38e9cf3 100644
--- a/spec/models/cycle_analytics/plan_spec.rb
+++ b/spec/models/cycle_analytics/plan_spec.rb
@@ -8,7 +8,8 @@ describe 'CycleAnalytics#plan' do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
generate_cycle_analytics_spec(
phase: :plan,
@@ -24,7 +25,7 @@ describe 'CycleAnalytics#plan' do
end],
["list label added to issue",
-> (context, data) do
- data[:issue].update(label_ids: [context.create(:label, lists: [context.create(:list)]).id])
+ data[:issue].update(label_ids: [context.create(:list).label_id])
end]],
end_time_conditions: [["issue mentioned in a commit",
-> (context, data) do
diff --git a/spec/models/cycle_analytics/production_spec.rb b/spec/models/cycle_analytics/production_spec.rb
index 383727cd8f7..ddd199362d1 100644
--- a/spec/models/cycle_analytics/production_spec.rb
+++ b/spec/models/cycle_analytics/production_spec.rb
@@ -8,7 +8,8 @@ describe 'CycleAnalytics#production' do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
generate_cycle_analytics_spec(
phase: :production,
diff --git a/spec/models/cycle_analytics_spec.rb b/spec/models/cycle_analytics/project_level_spec.rb
index 5d8b5b573cf..77bd0bfeb9c 100644
--- a/spec/models/cycle_analytics_spec.rb
+++ b/spec/models/cycle_analytics/project_level_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe CycleAnalytics do
+describe CycleAnalytics::ProjectLevel do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
@@ -11,9 +11,9 @@ describe CycleAnalytics do
let(:mr) { create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}") }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr) }
- subject { described_class.new(project, from: from_date) }
+ subject { described_class.new(project, options: { from: from_date }) }
- describe '#all_medians_per_stage' do
+ describe '#all_medians_by_stage' do
before do
allow_any_instance_of(Gitlab::ReferenceExtractor).to receive(:issues).and_return([issue])
@@ -26,7 +26,7 @@ describe CycleAnalytics do
hsh[stage_name] = subject[stage_name].median.presence
end
- expect(subject.all_medians_per_stage).to eq(values)
+ expect(subject.all_medians_by_stage).to eq(values)
end
end
end
diff --git a/spec/models/cycle_analytics/review_spec.rb b/spec/models/cycle_analytics/review_spec.rb
index 1af5f9cc1f4..63c481ed465 100644
--- a/spec/models/cycle_analytics/review_spec.rb
+++ b/spec/models/cycle_analytics/review_spec.rb
@@ -8,7 +8,8 @@ describe 'CycleAnalytics#review' do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
generate_cycle_analytics_spec(
phase: :review,
diff --git a/spec/models/cycle_analytics/staging_spec.rb b/spec/models/cycle_analytics/staging_spec.rb
index 8375944f03c..c134b97553f 100644
--- a/spec/models/cycle_analytics/staging_spec.rb
+++ b/spec/models/cycle_analytics/staging_spec.rb
@@ -9,7 +9,7 @@ describe 'CycleAnalytics#staging' do
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
generate_cycle_analytics_spec(
phase: :staging,
diff --git a/spec/models/cycle_analytics/test_spec.rb b/spec/models/cycle_analytics/test_spec.rb
index b78258df564..a6ea73b2699 100644
--- a/spec/models/cycle_analytics/test_spec.rb
+++ b/spec/models/cycle_analytics/test_spec.rb
@@ -8,7 +8,8 @@ describe 'CycleAnalytics#test' do
let(:project) { create(:project, :repository) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
- subject { CycleAnalytics.new(project, from: from_date) }
+
+ subject { CycleAnalytics::ProjectLevel.new(project, options: { from: from_date }) }
generate_cycle_analytics_spec(
phase: :test,
diff --git a/spec/models/deployment_metrics_spec.rb b/spec/models/deployment_metrics_spec.rb
new file mode 100644
index 00000000000..0aadb1f3a5e
--- /dev/null
+++ b/spec/models/deployment_metrics_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DeploymentMetrics do
+ describe '#has_metrics?' do
+ subject { described_class.new(deployment.project, deployment).has_metrics? }
+
+ context 'when deployment is failed' do
+ let(:deployment) { create(:deployment, :failed) }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when deployment is successful' do
+ let(:deployment) { create(:deployment, :success) }
+
+ context 'without a monitoring service' do
+ it { is_expected.to be_falsy }
+ end
+
+ context 'with a Prometheus Service' do
+ let(:prometheus_service) { instance_double(PrometheusService, can_query?: true) }
+
+ before do
+ allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with a Prometheus Service that cannot query' do
+ let(:prometheus_service) { instance_double(PrometheusService, can_query?: false) }
+
+ before do
+ allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ end
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'with a cluster Prometheus' do
+ let(:deployment) { create(:deployment, :success, :on_cluster) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: deployment.cluster) }
+
+ before do
+ expect(deployment.cluster.application_prometheus).to receive(:can_query?).and_return(true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'fallback deployment platform' do
+ let(:cluster) { create(:cluster, :provided_by_user, environment_scope: '*', projects: [deployment.project]) }
+ let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+
+ before do
+ expect(deployment.project).to receive(:deployment_platform).and_return(cluster.platform)
+ expect(cluster.application_prometheus).to receive(:can_query?).and_return(true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ describe '#metrics' do
+ let(:deployment) { create(:deployment, :success) }
+ let(:prometheus_adapter) { instance_double(PrometheusService, can_query?: true) }
+ let(:deployment_metrics) { described_class.new(deployment.project, deployment) }
+
+ subject { deployment_metrics.metrics }
+
+ context 'metrics are disabled' do
+ it { is_expected.to eq({}) }
+ end
+
+ context 'metrics are enabled' do
+ let(:simple_metrics) do
+ {
+ success: true,
+ metrics: {},
+ last_update: 42
+ }
+ end
+
+ before do
+ allow(deployment_metrics).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ expect(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
+ end
+
+ it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
+ end
+ end
+
+ describe '#additional_metrics' do
+ let(:project) { create(:project, :repository) }
+ let(:deployment) { create(:deployment, :succeed, project: project) }
+ let(:deployment_metrics) { described_class.new(deployment.project, deployment) }
+
+ subject { deployment_metrics.additional_metrics }
+
+ context 'metrics are disabled' do
+ it { is_expected.to eq({}) }
+ end
+
+ context 'metrics are enabled' do
+ let(:simple_metrics) do
+ {
+ success: true,
+ metrics: {},
+ last_update: 42
+ }
+ end
+
+ let(:prometheus_adapter) { instance_double(PrometheusService, can_query?: true) }
+
+ before do
+ allow(deployment_metrics).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+ expect(prometheus_adapter).to receive(:query).with(:additional_metrics_deployment, deployment).and_return(simple_metrics)
+ end
+
+ it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
+ end
+ end
+end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 713fb647708..d4e631f109b 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -295,125 +295,6 @@ describe Deployment do
end
end
- describe '#has_metrics?' do
- subject { deployment.has_metrics? }
-
- context 'when deployment is failed' do
- let(:deployment) { create(:deployment, :failed) }
-
- it { is_expected.to be_falsy }
- end
-
- context 'when deployment is success' do
- let(:deployment) { create(:deployment, :success) }
-
- context 'without a monitoring service' do
- it { is_expected.to be_falsy }
- end
-
- context 'with a Prometheus Service' do
- let(:prometheus_service) { double(:prometheus_service, can_query?: true) }
-
- before do
- allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'with a Prometheus Service that cannot query' do
- let(:prometheus_service) { double(:prometheus_service, can_query?: false) }
-
- before do
- allow(deployment.project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
- end
-
- it { is_expected.to be_falsy }
- end
-
- context 'with a cluster Prometheus' do
- let(:deployment) { create(:deployment, :success, :on_cluster) }
- let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: deployment.cluster) }
-
- before do
- expect(deployment.cluster.application_prometheus).to receive(:can_query?).and_return(true)
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'fallback deployment platform' do
- let(:cluster) { create(:cluster, :provided_by_user, environment_scope: '*', projects: [deployment.project]) }
- let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
-
- before do
- expect(deployment.project).to receive(:deployment_platform).and_return(cluster.platform)
- expect(cluster.application_prometheus).to receive(:can_query?).and_return(true)
- end
-
- it { is_expected.to be_truthy }
- end
- end
- end
-
- describe '#metrics' do
- let(:deployment) { create(:deployment, :success) }
- let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
-
- subject { deployment.metrics }
-
- context 'metrics are disabled' do
- it { is_expected.to eq({}) }
- end
-
- context 'metrics are enabled' do
- let(:simple_metrics) do
- {
- success: true,
- metrics: {},
- last_update: 42
- }
- end
-
- before do
- allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
- end
-
- it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
- end
- end
-
- describe '#additional_metrics' do
- let(:project) { create(:project, :repository) }
- let(:deployment) { create(:deployment, :succeed, project: project) }
-
- subject { deployment.additional_metrics }
-
- context 'metrics are disabled' do
- it { is_expected.to eq({}) }
- end
-
- context 'metrics are enabled' do
- let(:simple_metrics) do
- {
- success: true,
- metrics: {},
- last_update: 42
- }
- end
-
- let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
-
- before do
- allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:query).with(:additional_metrics_deployment, deployment).and_return(simple_metrics)
- end
-
- it { is_expected.to eq(simple_metrics.merge({ deployment_time: deployment.created_at.to_i })) }
- end
- end
-
describe '#stop_action' do
let(:build) { create(:ci_build) }
@@ -441,32 +322,4 @@ describe Deployment do
end
end
end
-
- describe '#deployment_platform_cluster' do
- let(:deployment) { create(:deployment) }
- let(:project) { deployment.project }
- let(:environment) { deployment.environment }
-
- subject { deployment.deployment_platform_cluster }
-
- before do
- expect(project).to receive(:deployment_platform)
- .with(environment: environment.name).and_call_original
- end
-
- context 'project has no deployment platform' do
- before do
- expect(project.clusters).to be_empty
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'project has a deployment platform' do
- let!(:cluster) { create(:cluster, projects: [project]) }
- let!(:platform) { create(:cluster_platform_kubernetes, cluster: cluster) }
-
- it { is_expected.to eq cluster }
- end
- end
end
diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb
index 22d4dab0617..950bdec4d00 100644
--- a/spec/models/discussion_spec.rb
+++ b/spec/models/discussion_spec.rb
@@ -10,6 +10,20 @@ describe Discussion do
let(:second_note) { create(:diff_note_on_merge_request, in_reply_to: first_note) }
let(:third_note) { create(:diff_note_on_merge_request) }
+ describe '.lazy_find' do
+ let!(:note1) { create(:discussion_note_on_merge_request).to_discussion }
+ let!(:note2) { create(:discussion_note_on_merge_request, in_reply_to: note1).to_discussion }
+
+ subject { [note1, note2].map { |note| described_class.lazy_find(note.discussion_id) } }
+
+ it 'batches requests' do
+ expect do
+ [described_class.lazy_find(note1.id),
+ described_class.lazy_find(note2.id)].map(&:__sync)
+ end.not_to exceed_query_limit(1)
+ end
+ end
+
describe '.build' do
it 'returns a discussion of the right type' do
discussion = described_class.build([first_note, second_note], merge_request)
diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb
index c503c35305f..e2836420df9 100644
--- a/spec/models/environment_status_spec.rb
+++ b/spec/models/environment_status_spec.rb
@@ -11,11 +11,10 @@ describe EnvironmentStatus do
let(:merge_request) { create(:merge_request, :deployed_review_app, deployment: deployment) }
let(:sha) { deployment.sha }
- subject(:environment_status) { described_class.new(environment, merge_request, sha) }
+ subject(:environment_status) { described_class.new(project, environment, merge_request, sha) }
it { is_expected.to delegate_method(:id).to(:environment) }
it { is_expected.to delegate_method(:name).to(:environment) }
- it { is_expected.to delegate_method(:project).to(:environment) }
it { is_expected.to delegate_method(:deployed_at).to(:deployment) }
it { is_expected.to delegate_method(:status).to(:deployment) }
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 07858fe8a70..7aa0d97b194 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -32,7 +32,7 @@ describe Issue::Metrics do
context "list labels" do
it "records the first time an issue is associated with a list label" do
- list_label = create(:label, lists: [create(:list)])
+ list_label = create(:list).label
time = Time.now
Timecop.freeze(time) { subject.update(label_ids: [list_label.id]) }
metrics = subject.metrics
@@ -43,9 +43,9 @@ describe Issue::Metrics do
it "does not record the second time an issue is associated with a list label" do
time = Time.now
- first_list_label = create(:label, lists: [create(:list)])
+ first_list_label = create(:list).label
Timecop.freeze(time) { subject.update(label_ids: [first_list_label.id]) }
- second_list_label = create(:label, lists: [create(:list)])
+ second_list_label = create(:list).label
Timecop.freeze(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
metrics = subject.metrics
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index fe6d68aff3f..9b0c232f370 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -3220,4 +3220,34 @@ describe MergeRequest do
it { is_expected.to be_truthy }
end
end
+
+ describe '#cleanup_refs' do
+ subject { merge_request.cleanup_refs(only: only) }
+
+ let(:merge_request) { build(:merge_request) }
+
+ context 'when removing all refs' do
+ let(:only) { :all }
+
+ it 'deletes all refs from the target project' do
+ expect(merge_request.target_project.repository)
+ .to receive(:delete_refs)
+ .with(merge_request.ref_path, merge_request.merge_ref_path, merge_request.train_ref_path)
+
+ subject
+ end
+ end
+
+ context 'when removing only train ref' do
+ let(:only) { :train }
+
+ it 'deletes train ref from the target project' do
+ expect(merge_request.target_project.repository)
+ .to receive(:delete_refs)
+ .with(merge_request.train_ref_path)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 7a1ab20186a..03003e3dd7d 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -177,6 +177,7 @@ describe Note do
pipeline: :note,
cache_key: [note1, "note"],
project: note1.project,
+ rendered: note1.note_html,
author: note1.author
}
}]).and_call_original
@@ -189,6 +190,7 @@ describe Note do
pipeline: :note,
cache_key: [note2, "note"],
project: note2.project,
+ rendered: note2.note_html,
author: note2.author
}
}]).and_call_original
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 661957cf08b..973c67937b7 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -53,24 +53,33 @@ describe PagesDomain do
end
let(:pages_domain) do
- build(:pages_domain, certificate: certificate, key: key).tap do |pd|
+ build(:pages_domain, certificate: certificate, key: key,
+ auto_ssl_enabled: auto_ssl_enabled).tap do |pd|
allow(pd).to receive(:project).and_return(project)
pd.valid?
end
end
- where(:pages_https_only, :certificate, :key, :errors_on) do
+ where(:pages_https_only, :certificate, :key, :auto_ssl_enabled, :errors_on) do
attributes = attributes_for(:pages_domain)
cert, key = attributes.fetch_values(:certificate, :key)
- true | nil | nil | %i(certificate key)
- true | cert | nil | %i(key)
- true | nil | key | %i(certificate key)
- true | cert | key | []
- false | nil | nil | []
- false | cert | nil | %i(key)
- false | nil | key | %i(key)
- false | cert | key | []
+ true | nil | nil | false | %i(certificate key)
+ true | nil | nil | true | []
+ true | cert | nil | false | %i(key)
+ true | cert | nil | true | %i(key)
+ true | nil | key | false | %i(certificate key)
+ true | nil | key | true | %i(key)
+ true | cert | key | false | []
+ true | cert | key | true | []
+ false | nil | nil | false | []
+ false | nil | nil | true | []
+ false | cert | nil | false | %i(key)
+ false | cert | nil | true | %i(key)
+ false | nil | key | false | %i(key)
+ false | nil | key | true | %i(key)
+ false | cert | key | false | []
+ false | cert | key | true | []
end
with_them do
diff --git a/spec/models/project_services/bugzilla_service_spec.rb b/spec/models/project_services/bugzilla_service_spec.rb
index d5b0f94f461..74c85a13c88 100644
--- a/spec/models/project_services/bugzilla_service_spec.rb
+++ b/spec/models/project_services/bugzilla_service_spec.rb
@@ -44,7 +44,9 @@ describe BugzillaService do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab-ce/issues/63084
context 'when data are stored in properties' do
let(:properties) { access_params.merge(title: title, description: description) }
- let(:service) { create(:bugzilla_service, properties: properties) }
+ let(:service) do
+ create(:bugzilla_service, :without_properties_callback, properties: properties)
+ end
include_examples 'issue tracker fields'
end
@@ -60,7 +62,7 @@ describe BugzillaService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { access_params.merge(title: 'wrong title', description: 'wrong description') }
let(:service) do
- create(:bugzilla_service, title: title, description: description, properties: properties)
+ create(:bugzilla_service, :without_properties_callback, title: title, description: description, properties: properties)
end
include_examples 'issue tracker fields'
diff --git a/spec/models/project_services/custom_issue_tracker_service_spec.rb b/spec/models/project_services/custom_issue_tracker_service_spec.rb
index 56b0bda6626..5259357a254 100644
--- a/spec/models/project_services/custom_issue_tracker_service_spec.rb
+++ b/spec/models/project_services/custom_issue_tracker_service_spec.rb
@@ -58,7 +58,9 @@ describe CustomIssueTrackerService do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab-ce/issues/63084
context 'when data are stored in properties' do
let(:properties) { access_params.merge(title: title, description: description) }
- let(:service) { create(:custom_issue_tracker_service, properties: properties) }
+ let(:service) do
+ create(:custom_issue_tracker_service, :without_properties_callback, properties: properties)
+ end
include_examples 'issue tracker fields'
end
@@ -74,7 +76,7 @@ describe CustomIssueTrackerService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { access_params.merge(title: 'wrong title', description: 'wrong description') }
let(:service) do
- create(:custom_issue_tracker_service, title: title, description: description, properties: properties)
+ create(:custom_issue_tracker_service, :without_properties_callback, title: title, description: description, properties: properties)
end
include_examples 'issue tracker fields'
diff --git a/spec/models/project_services/drone_ci_service_spec.rb b/spec/models/project_services/drone_ci_service_spec.rb
index 22df19d943f..a771d1bf27f 100644
--- a/spec/models/project_services/drone_ci_service_spec.rb
+++ b/spec/models/project_services/drone_ci_service_spec.rb
@@ -101,6 +101,15 @@ describe DroneCiService, :use_clean_rails_memory_store_caching do
is_expected.to eq(:error)
end
+ Gitlab::HTTP::HTTP_ERRORS.each do |http_error|
+ it "sets commit status to :error with a #{http_error.name} error" do
+ WebMock.stub_request(:get, commit_status_path)
+ .to_raise(http_error)
+
+ is_expected.to eq(:error)
+ end
+ end
+
{
"killed" => :canceled,
"failure" => :failed,
diff --git a/spec/models/project_services/gitlab_issue_tracker_service_spec.rb b/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
index a3726f09dc5..0c4fc290a13 100644
--- a/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
+++ b/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
@@ -61,7 +61,9 @@ describe GitlabIssueTrackerService do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab-ce/issues/63084
context 'when data are stored in properties' do
let(:properties) { access_params.merge(title: title, description: description) }
- let(:service) { create(:gitlab_issue_tracker_service, properties: properties) }
+ let(:service) do
+ create(:gitlab_issue_tracker_service, :without_properties_callback, properties: properties)
+ end
include_examples 'issue tracker fields'
end
@@ -77,7 +79,7 @@ describe GitlabIssueTrackerService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { access_params.merge(title: 'wrong title', description: 'wrong description') }
let(:service) do
- create(:gitlab_issue_tracker_service, title: title, description: description, properties: properties)
+ create(:gitlab_issue_tracker_service, :without_properties_callback, title: title, description: description, properties: properties)
end
include_examples 'issue tracker fields'
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 9b122d85293..235cf314af5 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -145,7 +145,9 @@ describe JiraService do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab-ce/issues/63084
context 'when data are stored in properties' do
let(:properties) { access_params.merge(title: title, description: description) }
- let(:service) { create(:jira_service, properties: properties) }
+ let(:service) do
+ create(:jira_service, :without_properties_callback, properties: properties)
+ end
include_examples 'issue tracker fields'
end
@@ -161,7 +163,7 @@ describe JiraService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { access_params.merge(title: 'wrong title', description: 'wrong description') }
let(:service) do
- create(:jira_service, title: title, description: description, properties: properties)
+ create(:jira_service, :without_properties_callback, title: title, description: description, properties: properties)
end
include_examples 'issue tracker fields'
diff --git a/spec/models/project_services/redmine_service_spec.rb b/spec/models/project_services/redmine_service_spec.rb
index 806e3695962..c1ee6546b12 100644
--- a/spec/models/project_services/redmine_service_spec.rb
+++ b/spec/models/project_services/redmine_service_spec.rb
@@ -50,7 +50,9 @@ describe RedmineService do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab-ce/issues/63084
context 'when data are stored in properties' do
let(:properties) { access_params.merge(title: title, description: description) }
- let(:service) { create(:redmine_service, properties: properties) }
+ let(:service) do
+ create(:redmine_service, :without_properties_callback, properties: properties)
+ end
include_examples 'issue tracker fields'
end
@@ -66,7 +68,7 @@ describe RedmineService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { access_params.merge(title: 'wrong title', description: 'wrong description') }
let(:service) do
- create(:redmine_service, title: title, description: description, properties: properties)
+ create(:redmine_service, :without_properties_callback, title: title, description: description, properties: properties)
end
include_examples 'issue tracker fields'
diff --git a/spec/models/project_services/youtrack_service_spec.rb b/spec/models/project_services/youtrack_service_spec.rb
index b47ef6702b4..c48bf487af0 100644
--- a/spec/models/project_services/youtrack_service_spec.rb
+++ b/spec/models/project_services/youtrack_service_spec.rb
@@ -47,7 +47,9 @@ describe YoutrackService do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab-ce/issues/63084
context 'when data are stored in properties' do
let(:properties) { access_params.merge(title: title, description: description) }
- let(:service) { create(:youtrack_service, properties: properties) }
+ let(:service) do
+ create(:youtrack_service, :without_properties_callback, properties: properties)
+ end
include_examples 'issue tracker fields'
end
@@ -63,7 +65,7 @@ describe YoutrackService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { access_params.merge(title: 'wrong title', description: 'wrong description') }
let(:service) do
- create(:youtrack_service, title: title, description: description, properties: properties)
+ create(:youtrack_service, :without_properties_callback, title: title, description: description, properties: properties)
end
include_examples 'issue tracker fields'
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 1cb49d83ffa..db3e4902c64 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -135,6 +135,49 @@ describe ProjectStatistics do
expect(statistics.wiki_size).to eq(0)
end
end
+
+ context 'when the column is namespace relatable' do
+ let(:namespace) { create(:group) }
+ let(:project) { create(:project, namespace: namespace) }
+
+ context 'when the feature flag is off' do
+ it 'does not schedule the aggregation worker' do
+ stub_feature_flags(update_statistics_namespace: false, namespace: namespace)
+
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ statistics.refresh!(only: [:lfs_objects_size])
+ end
+ end
+
+ context 'when the feature flag is on' do
+ it 'schedules the aggregation worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async)
+
+ statistics.refresh!(only: [:lfs_objects_size])
+ end
+ end
+
+ context 'when no argument is passed' do
+ it 'schedules the aggregation worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async)
+
+ statistics.refresh!
+ end
+ end
+ end
+
+ context 'when the column is not namespace relatable' do
+ it 'does not schedule an aggregation worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ statistics.refresh!(only: [:commit_count])
+ end
+ end
end
describe '#update_commit_count' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 3d967aa4ab8..12dff440ce2 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2426,16 +2426,15 @@ describe Repository do
# Gets the commit oid, and warms the cache
oid = project.commit.id
- expect(Gitlab::Git::Commit).not_to receive(:find).once
+ expect(Gitlab::Git::Commit).to receive(:find).once
- project.commit_by(oid: oid)
+ 2.times { project.commit_by(oid: oid) }
end
it 'caches nil values' do
expect(Gitlab::Git::Commit).to receive(:find).once
- project.commit_by(oid: '1' * 40)
- project.commit_by(oid: '1' * 40)
+ 2.times { project.commit_by(oid: '1' * 40) }
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index a4d177da0be..e4f84172215 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -2931,7 +2931,7 @@ describe User do
let(:user) { create(:user, username: username) }
context 'when the user is updated' do
- context 'when the username is changed' do
+ context 'when the username or name is changed' do
let(:new_username) { 'bar' }
it 'changes the namespace (just to compare to when username is not changed)' do
@@ -2942,16 +2942,24 @@ describe User do
end.to change { user.namespace.updated_at }
end
- it 'updates the namespace name' do
+ it 'updates the namespace path when the username was changed' do
user.update!(username: new_username)
- expect(user.namespace.name).to eq(new_username)
+ expect(user.namespace.path).to eq(new_username)
end
- it 'updates the namespace path' do
- user.update!(username: new_username)
+ it 'updates the namespace name if the name was changed' do
+ user.update!(name: 'New name')
- expect(user.namespace.path).to eq(new_username)
+ expect(user.namespace.name).to eq('New name')
+ end
+
+ it 'updates nested routes for the namespace if the name was changed' do
+ project = create(:project, namespace: user.namespace)
+
+ user.update!(name: 'New name')
+
+ expect(project.route.reload.name).to include('New name')
end
context 'when there is a validation error (namespace name taken) while updating namespace' do
diff --git a/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
new file mode 100644
index 00000000000..b04fcb9aece
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/notes/create/diff_note_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Adding a DiffNote' do
+ include GraphqlHelpers
+
+ set(:current_user) { create(:user) }
+ let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
+ let(:project) { create(:project, :repository) }
+ let(:diff_refs) { noteable.diff_refs }
+ let(:mutation) do
+ variables = {
+ noteable_id: GitlabSchema.id_from_object(noteable).to_s,
+ body: 'Body text',
+ position: {
+ paths: {
+ old_path: 'files/ruby/popen.rb',
+ new_path: 'files/ruby/popen2.rb'
+ },
+ new_line: 14,
+ base_sha: diff_refs.base_sha,
+ head_sha: diff_refs.head_sha,
+ start_sha: diff_refs.start_sha
+ }
+ }
+
+ graphql_mutation(:create_diff_note, variables)
+ end
+
+ def mutation_response
+ graphql_mutation_response(:create_diff_note)
+ end
+
+ it_behaves_like 'a Note mutation when the user does not have permission'
+
+ context 'when the user has permission' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'a Note mutation that creates a Note'
+
+ it_behaves_like 'a Note mutation when there are active record validation errors', model: DiffNote
+
+ context do
+ let(:diff_refs) { build(:merge_request).diff_refs } # Allow fake diff refs so arguments are valid
+
+ it_behaves_like 'a Note mutation when the given resource id is not for a Noteable'
+ end
+
+ it 'returns the note with the correct position' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']['body']).to eq('Body text')
+ mutation_position_response = mutation_response['note']['position']
+ expect(mutation_position_response['positionType']).to eq('text')
+ expect(mutation_position_response['filePath']).to eq('files/ruby/popen2.rb')
+ expect(mutation_position_response['oldPath']).to eq('files/ruby/popen.rb')
+ expect(mutation_position_response['newPath']).to eq('files/ruby/popen2.rb')
+ expect(mutation_position_response['newLine']).to eq(14)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
new file mode 100644
index 00000000000..3ba6c689024
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/notes/create/image_diff_note_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Adding an image DiffNote' do
+ include GraphqlHelpers
+
+ set(:current_user) { create(:user) }
+ let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
+ let(:project) { create(:project, :repository) }
+ let(:diff_refs) { noteable.diff_refs }
+ let(:mutation) do
+ variables = {
+ noteable_id: GitlabSchema.id_from_object(noteable).to_s,
+ body: 'Body text',
+ position: {
+ paths: {
+ old_path: 'files/images/any_image.png',
+ new_path: 'files/images/any_image2.png'
+ },
+ width: 100,
+ height: 200,
+ x: 1,
+ y: 2,
+ base_sha: diff_refs.base_sha,
+ head_sha: diff_refs.head_sha,
+ start_sha: diff_refs.start_sha
+ }
+ }
+
+ graphql_mutation(:create_image_diff_note, variables)
+ end
+
+ def mutation_response
+ graphql_mutation_response(:create_image_diff_note)
+ end
+
+ it_behaves_like 'a Note mutation when the user does not have permission'
+
+ context 'when the user has permission' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'a Note mutation that creates a Note'
+
+ it_behaves_like 'a Note mutation when there are active record validation errors', model: DiffNote
+
+ context do
+ let(:diff_refs) { build(:merge_request).diff_refs } # Allow fake diff refs so arguments are valid
+
+ it_behaves_like 'a Note mutation when the given resource id is not for a Noteable'
+ end
+
+ it 'returns the note with the correct position' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']['body']).to eq('Body text')
+ mutation_position_response = mutation_response['note']['position']
+ expect(mutation_position_response['filePath']).to eq('files/images/any_image2.png')
+ expect(mutation_position_response['oldPath']).to eq('files/images/any_image.png')
+ expect(mutation_position_response['newPath']).to eq('files/images/any_image2.png')
+ expect(mutation_position_response['positionType']).to eq('image')
+ expect(mutation_position_response['width']).to eq(100)
+ expect(mutation_position_response['height']).to eq(200)
+ expect(mutation_position_response['x']).to eq(1)
+ expect(mutation_position_response['y']).to eq(2)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
new file mode 100644
index 00000000000..14aaa430ac9
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Adding a Note' do
+ include GraphqlHelpers
+
+ set(:current_user) { create(:user) }
+ let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
+ let(:project) { create(:project) }
+ let(:discussion) { nil }
+ let(:mutation) do
+ variables = {
+ noteable_id: GitlabSchema.id_from_object(noteable).to_s,
+ discussion_id: (GitlabSchema.id_from_object(discussion).to_s if discussion),
+ body: 'Body text'
+ }
+
+ graphql_mutation(:create_note, variables)
+ end
+
+ def mutation_response
+ graphql_mutation_response(:create_note)
+ end
+
+ it_behaves_like 'a Note mutation when the user does not have permission'
+
+ context 'when the user has permission' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'a Note mutation that creates a Note'
+
+ it_behaves_like 'a Note mutation when there are active record validation errors'
+
+ it_behaves_like 'a Note mutation when the given resource id is not for a Noteable'
+
+ it 'returns the note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']['body']).to eq('Body text')
+ end
+
+ describe 'creating Notes in reply to a discussion' do
+ context 'when the user does not have permission to create notes on the discussion' do
+ let(:discussion) { create(:discussion_note).to_discussion }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["The discussion does not exist or you don't have permission to perform this action"]
+ end
+
+ context 'when the user has permission to create notes on the discussion' do
+ let(:discussion) { create(:discussion_note, project: project).to_discussion }
+
+ it 'creates a Note in a discussion' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']['discussion']['id']).to eq(discussion.to_global_id.to_s)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/notes/destroy_spec.rb b/spec/requests/api/graphql/mutations/notes/destroy_spec.rb
new file mode 100644
index 00000000000..337a6e6f6e6
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/notes/destroy_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Destroying a Note' do
+ include GraphqlHelpers
+
+ let!(:note) { create(:note) }
+ let(:mutation) do
+ variables = {
+ id: GitlabSchema.id_from_object(note).to_s
+ }
+
+ graphql_mutation(:destroy_note, variables)
+ end
+
+ def mutation_response
+ graphql_mutation_response(:destroy_note)
+ end
+
+ context 'when the user does not have permission' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+
+ it 'does not destroy the Note' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.not_to change { Note.count }
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { note.author }
+
+ it_behaves_like 'a Note mutation when the given resource id is not for a Note'
+
+ it 'destroys the Note' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { Note.count }.by(-1)
+ end
+
+ it 'returns an empty Note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('note')
+ expect(mutation_response['note']).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/notes/update_spec.rb b/spec/requests/api/graphql/mutations/notes/update_spec.rb
new file mode 100644
index 00000000000..958f640995a
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/notes/update_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Updating a Note' do
+ include GraphqlHelpers
+
+ let!(:note) { create(:note, note: original_body) }
+ let(:original_body) { 'Initial body text' }
+ let(:updated_body) { 'Updated body text' }
+ let(:mutation) do
+ variables = {
+ id: GitlabSchema.id_from_object(note).to_s,
+ body: updated_body
+ }
+
+ graphql_mutation(:update_note, variables)
+ end
+
+ def mutation_response
+ graphql_mutation_response(:update_note)
+ end
+
+ context 'when the user does not have permission' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+
+ it 'does not update the Note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(original_body)
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { note.author }
+
+ it_behaves_like 'a Note mutation when the given resource id is not for a Note'
+
+ it 'updates the Note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(updated_body)
+ end
+
+ it 'returns the updated Note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']['body']).to eq(updated_body)
+ end
+
+ context 'when there are ActiveRecord validation errors' do
+ let(:updated_body) { '' }
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: ["Note can't be blank"]
+
+ it 'does not update the Note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(original_body)
+ end
+
+ it 'returns the Note with its original body' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['note']['body']).to eq(original_body)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 67371cb35b6..54401ec4085 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -6,16 +6,6 @@ describe 'GraphQL' do
let(:query) { graphql_query_for('echo', 'text' => 'Hello world' ) }
- context 'graphql is disabled by feature flag' do
- before do
- stub_feature_flags(graphql: false)
- end
-
- it 'does not generate a route for GraphQL' do
- expect { post_graphql(query) }.to raise_error(ActionController::RoutingError)
- end
- end
-
context 'logging' do
shared_examples 'logging a graphql query' do
let(:expected_params) do
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
new file mode 100644
index 00000000000..46e3dd650cc
--- /dev/null
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -0,0 +1,452 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::GroupClusters do
+ include KubernetesHelpers
+
+ let(:current_user) { create(:user) }
+ let(:developer_user) { create(:user) }
+ let(:group) { create(:group, :private) }
+
+ before do
+ group.add_developer(developer_user)
+ group.add_maintainer(current_user)
+ end
+
+ describe 'GET /groups/:id/clusters' do
+ let!(:extra_cluster) { create(:cluster, :provided_by_gcp, :group) }
+
+ let!(:clusters) do
+ create_list(:cluster, 5, :provided_by_gcp, :group, :production_environment,
+ groups: [group])
+ end
+
+ context 'non-authorized user' do
+ it 'responds with 403' do
+ get api("/groups/#{group.id}/clusters", developer_user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'authorized user' do
+ before do
+ get api("/groups/#{group.id}/clusters", current_user)
+ end
+
+ it 'responds with 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'includes pagination headers' do
+ expect(response).to include_pagination_headers
+ end
+
+ it 'only includes authorized clusters' do
+ cluster_ids = json_response.map { |cluster| cluster['id'] }
+
+ expect(cluster_ids).to match_array(clusters.pluck(:id))
+ expect(cluster_ids).not_to include(extra_cluster.id)
+ end
+ end
+ end
+
+ describe 'GET /groups/:id/clusters/:cluster_id' do
+ let(:cluster_id) { cluster.id }
+
+ let(:platform_kubernetes) do
+ create(:cluster_platform_kubernetes, :configured)
+ end
+
+ let(:cluster) do
+ create(:cluster, :group, :provided_by_gcp, :with_domain,
+ platform_kubernetes: platform_kubernetes,
+ user: current_user,
+ groups: [group])
+ end
+
+ context 'non-authorized user' do
+ it 'responds with 403' do
+ get api("/groups/#{group.id}/clusters/#{cluster_id}", developer_user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'authorized user' do
+ before do
+ get api("/groups/#{group.id}/clusters/#{cluster_id}", current_user)
+ end
+
+ it 'returns specific cluster' do
+ expect(json_response['id']).to eq(cluster.id)
+ end
+
+ it 'returns cluster information' do
+ expect(json_response['provider_type']).to eq('gcp')
+ expect(json_response['platform_type']).to eq('kubernetes')
+ expect(json_response['environment_scope']).to eq('*')
+ expect(json_response['cluster_type']).to eq('group_type')
+ expect(json_response['domain']).to eq('example.com')
+ end
+
+ it 'returns group information' do
+ cluster_group = json_response['group']
+
+ expect(cluster_group['id']).to eq(group.id)
+ expect(cluster_group['name']).to eq(group.name)
+ expect(cluster_group['web_url']).to eq(group.web_url)
+ end
+
+ it 'returns kubernetes platform information' do
+ platform = json_response['platform_kubernetes']
+
+ expect(platform['api_url']).to eq('https://kubernetes.example.com')
+ expect(platform['ca_cert']).to be_present
+ end
+
+ it 'returns user information' do
+ user = json_response['user']
+
+ expect(user['id']).to eq(current_user.id)
+ expect(user['username']).to eq(current_user.username)
+ end
+
+ it 'returns GCP provider information' do
+ gcp_provider = json_response['provider_gcp']
+
+ expect(gcp_provider['cluster_id']).to eq(cluster.id)
+ expect(gcp_provider['status_name']).to eq('created')
+ expect(gcp_provider['gcp_project_id']).to eq('test-gcp-project')
+ expect(gcp_provider['zone']).to eq('us-central1-a')
+ expect(gcp_provider['machine_type']).to eq('n1-standard-2')
+ expect(gcp_provider['num_nodes']).to eq(3)
+ expect(gcp_provider['endpoint']).to eq('111.111.111.111')
+ end
+
+ context 'when cluster has no provider' do
+ let(:cluster) do
+ create(:cluster, :group, :provided_by_user,
+ groups: [group])
+ end
+
+ it 'does not include GCP provider info' do
+ expect(json_response['provider_gcp']).not_to be_present
+ end
+ end
+
+ context 'with non-existing cluster' do
+ let(:cluster_id) { 123 }
+
+ it 'returns 404' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+
+ shared_context 'kubernetes calls stubbed' do
+ before do
+ stub_kubeclient_discover(api_url)
+ end
+ end
+
+ describe 'POST /groups/:id/clusters/user' do
+ include_context 'kubernetes calls stubbed'
+
+ let(:api_url) { 'https://kubernetes.example.com' }
+ let(:authorization_type) { 'rbac' }
+
+ let(:platform_kubernetes_attributes) do
+ {
+ api_url: api_url,
+ token: 'sample-token',
+ authorization_type: authorization_type
+ }
+ end
+
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ managed: false,
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ context 'non-authorized user' do
+ it 'responds with 403' do
+ post api("/groups/#{group.id}/clusters/user", developer_user), params: cluster_params
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'authorized user' do
+ before do
+ post api("/groups/#{group.id}/clusters/user", current_user), params: cluster_params
+ end
+
+ context 'with valid params' do
+ it 'responds with 201' do
+ expect(response).to have_gitlab_http_status(201)
+ end
+
+ it 'creates a new Cluster::Cluster' do
+ cluster_result = Clusters::Cluster.find(json_response["id"])
+ platform_kubernetes = cluster_result.platform
+
+ expect(cluster_result).to be_user
+ expect(cluster_result).to be_kubernetes
+ expect(cluster_result.group).to eq(group)
+ expect(cluster_result.name).to eq('test-cluster')
+ expect(cluster_result.domain).to eq('domain.example.com')
+ expect(cluster_result.managed).to be_falsy
+ expect(platform_kubernetes.rbac?).to be_truthy
+ expect(platform_kubernetes.api_url).to eq(api_url)
+ expect(platform_kubernetes.token).to eq('sample-token')
+ end
+ end
+
+ context 'when user does not indicate authorization type' do
+ let(:platform_kubernetes_attributes) do
+ {
+ api_url: api_url,
+ token: 'sample-token'
+ }
+ end
+
+ it 'defaults to RBAC' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result.platform_kubernetes.rbac?).to be_truthy
+ end
+ end
+
+ context 'when user sets authorization type as ABAC' do
+ let(:authorization_type) { 'abac' }
+
+ it 'creates an ABAC cluster' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result.platform.abac?).to be_truthy
+ end
+ end
+
+ context 'with invalid params' do
+ let(:api_url) { 'invalid_api_url' }
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'does not create a new Clusters::Cluster' do
+ expect(group.reload.clusters).to be_empty
+ end
+
+ it 'returns validation errors' do
+ expect(json_response['message']['platform_kubernetes.api_url'].first).to be_present
+ end
+ end
+ end
+
+ context 'when user tries to add multiple clusters' do
+ before do
+ create(:cluster, :provided_by_gcp, :group,
+ groups: [group])
+
+ post api("/groups/#{group.id}/clusters/user", current_user), params: cluster_params
+ end
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+ expect(json_response['message']['base'].first).to include('Instance does not support multiple Kubernetes clusters')
+ end
+ end
+
+ context 'non-authorized user' do
+ before do
+ post api("/groups/#{group.id}/clusters/user", developer_user), params: cluster_params
+ end
+
+ it 'responds with 403' do
+ expect(response).to have_gitlab_http_status(403)
+
+ expect(json_response['message']).to eq('403 Forbidden')
+ end
+ end
+ end
+
+ describe 'PUT /groups/:id/clusters/:cluster_id' do
+ include_context 'kubernetes calls stubbed'
+
+ let(:api_url) { 'https://kubernetes.example.com' }
+
+ let(:update_params) do
+ {
+ domain: domain,
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ let(:domain) { 'new-domain.com' }
+ let(:platform_kubernetes_attributes) { {} }
+
+ let(:cluster) do
+ create(:cluster, :group, :provided_by_gcp,
+ groups: [group], domain: 'old-domain.com')
+ end
+
+ context 'non-authorized user' do
+ it 'responds with 403' do
+ put api("/groups/#{group.id}/clusters/#{cluster.id}", developer_user), params: update_params
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'authorized user' do
+ before do
+ put api("/groups/#{group.id}/clusters/#{cluster.id}", current_user), params: update_params
+
+ cluster.reload
+ end
+
+ context 'with valid params' do
+ it 'responds with 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'updates cluster attributes' do
+ expect(cluster.domain).to eq('new-domain.com')
+ end
+ end
+
+ context 'with invalid params' do
+ let(:domain) { 'invalid domain' }
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'does not update cluster attributes' do
+ expect(cluster.domain).to eq('old-domain.com')
+ end
+
+ it 'returns validation errors' do
+ expect(json_response['message']['domain'].first).to match('contains invalid characters (valid characters: [a-z0-9\\-])')
+ end
+ end
+
+ context 'with a GCP cluster' do
+ context 'when user tries to change GCP specific fields' do
+ let(:platform_kubernetes_attributes) do
+ {
+ api_url: 'https://new-api-url.com',
+ token: 'new-sample-token'
+ }
+ end
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+
+ it 'returns validation error' do
+ expect(json_response['message']['platform_kubernetes.base'].first).to eq('Cannot modify managed Kubernetes cluster')
+ end
+ end
+
+ context 'when user tries to change domain' do
+ let(:domain) { 'new-domain.com' }
+
+ it 'responds with 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+ end
+
+ context 'with a user cluster' do
+ let(:api_url) { 'https://new-api-url.com' }
+
+ let(:cluster) do
+ create(:cluster, :group, :provided_by_user,
+ groups: [group])
+ end
+
+ let(:platform_kubernetes_attributes) do
+ {
+ api_url: api_url,
+ token: 'new-sample-token'
+ }
+ end
+
+ let(:update_params) do
+ {
+ name: 'new-name',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'responds with 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'updates platform kubernetes attributes' do
+ platform_kubernetes = cluster.platform_kubernetes
+
+ expect(cluster.name).to eq('new-name')
+ expect(platform_kubernetes.api_url).to eq('https://new-api-url.com')
+ expect(platform_kubernetes.token).to eq('new-sample-token')
+ end
+ end
+
+ context 'with a cluster that does not belong to user' do
+ let(:cluster) { create(:cluster, :group, :provided_by_user) }
+
+ it 'responds with 404' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+
+ describe 'DELETE /groups/:id/clusters/:cluster_id' do
+ let(:cluster_params) { { cluster_id: cluster.id } }
+
+ let(:cluster) do
+ create(:cluster, :group, :provided_by_gcp,
+ groups: [group])
+ end
+
+ context 'non-authorized user' do
+ it 'responds with 403' do
+ delete api("/groups/#{group.id}/clusters/#{cluster.id}", developer_user), params: cluster_params
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'authorized user' do
+ before do
+ delete api("/groups/#{group.id}/clusters/#{cluster.id}", current_user), params: cluster_params
+ end
+
+ it 'responds with 204' do
+ expect(response).to have_gitlab_http_status(204)
+ end
+
+ it 'deletes the cluster' do
+ expect(Clusters::Cluster.exists?(id: cluster.id)).to be_falsy
+ end
+
+ context 'with a cluster that does not belong to user' do
+ let(:cluster) { create(:cluster, :group, :provided_by_user) }
+
+ it 'responds with 404' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index fcbff19bd61..3ab1818bebb 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -474,7 +474,7 @@ describe API::Internal do
'ssh',
{
authentication_abilities: [:read_project, :download_code, :push_code],
- namespace_path: project.namespace.name,
+ namespace_path: project.namespace.path,
project_path: project.path,
redirected_path: nil
}
@@ -753,7 +753,7 @@ describe API::Internal do
end
describe 'GET /internal/merge_request_urls' do
- let(:repo_name) { "#{project.namespace.name}/#{project.path}" }
+ let(:repo_name) { "#{project.full_path}" }
let(:changes) { URI.escape("#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch") }
before do
@@ -765,7 +765,7 @@ describe API::Internal do
expect(json_response).to match [{
"branch_name" => "new_branch",
- "url" => "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch",
+ "url" => "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch",
"new_merge_request" => true
}]
end
@@ -786,7 +786,7 @@ describe API::Internal do
expect(json_response).to match [{
"branch_name" => "new_branch",
- "url" => "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch",
+ "url" => "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch",
"new_merge_request" => true
}]
end
@@ -927,7 +927,7 @@ describe API::Internal do
expect(json_response['merge_request_urls']).to match [{
"branch_name" => branch_name,
- "url" => "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}",
+ "url" => "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{branch_name}",
"new_merge_request" => true
}]
end
@@ -970,7 +970,7 @@ describe API::Internal do
expect(json_response['merge_request_urls']).to match [{
'branch_name' => branch_name,
- 'url' => "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/1",
+ 'url' => "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/1",
'new_merge_request' => false
}]
end
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index a6e08ab3ab6..e8ed016db69 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -257,12 +257,22 @@ describe API::ProjectClusters do
post api("/projects/#{project.id}/clusters/user", current_user), params: cluster_params
end
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(400)
+
+ expect(json_response['message']['base'].first).to eq('Instance does not support multiple Kubernetes clusters')
+ end
+ end
+
+ context 'non-authorized user' do
+ before do
+ post api("/projects/#{project.id}/clusters/user", developer_user), params: cluster_params
+ end
+
it 'responds with 403' do
expect(response).to have_gitlab_http_status(403)
- end
- it 'returns an appropriate message' do
- expect(json_response['message']).to include('Instance does not support multiple Kubernetes clusters')
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index c67412a44c1..a2aae257352 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1102,6 +1102,12 @@ describe API::Projects do
expect(json_response['wiki_enabled']).to be_present
expect(json_response['jobs_enabled']).to be_present
expect(json_response['snippets_enabled']).to be_present
+ expect(json_response['snippets_access_level']).to be_present
+ expect(json_response['repository_access_level']).to be_present
+ expect(json_response['issues_access_level']).to be_present
+ expect(json_response['merge_requests_access_level']).to be_present
+ expect(json_response['wiki_access_level']).to be_present
+ expect(json_response['builds_access_level']).to be_present
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['container_registry_enabled']).to be_present
expect(json_response['created_at']).to be_present
@@ -1913,6 +1919,34 @@ describe API::Projects do
end
end
+ it 'updates builds_access_level' do
+ project_param = { builds_access_level: 'private' }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['builds_access_level']).to eq('private')
+ end
+
+ it 'updates build_git_strategy' do
+ project_param = { build_git_strategy: 'clone' }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['build_git_strategy']).to eq('clone')
+ end
+
+ it 'rejects updating build_git_strategy when build_git_strategy is invalid' do
+ project_param = { build_git_strategy: 'invalid' }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+
it 'updates merge_method' do
project_param = { merge_method: 'ff' }
@@ -1946,6 +1980,26 @@ describe API::Projects do
'-/system/project/avatar/'\
"#{project3.id}/banana_sample.gif")
end
+
+ it 'updates auto_devops_deploy_strategy' do
+ project_param = { auto_devops_deploy_strategy: 'timed_incremental' }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['auto_devops_deploy_strategy']).to eq('timed_incremental')
+ end
+
+ it 'updates auto_devops_enabled' do
+ project_param = { auto_devops_enabled: false }
+
+ put api("/projects/#{project3.id}", user), params: project_param
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['auto_devops_enabled']).to eq(false)
+ end
end
context 'when authenticated as project maintainer' do
diff --git a/spec/requests/api/user_counts_spec.rb b/spec/requests/api/user_counts_spec.rb
new file mode 100644
index 00000000000..c833bd047e2
--- /dev/null
+++ b/spec/requests/api/user_counts_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::UserCounts do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+
+ let!(:merge_request) { create(:merge_request, :simple, author: user, assignees: [user], source_project: project, title: "Test") }
+
+ describe 'GET /user_counts' do
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ get api('/user_counts')
+
+ expect(response.status).to eq(401)
+ end
+ end
+
+ context 'when authenticated' do
+ it 'returns open counts for current user' do
+ get api('/user_counts', user)
+
+ expect(response.status).to eq(200)
+ expect(json_response).to be_a Hash
+ expect(json_response['merge_requests']).to eq(1)
+ end
+
+ it 'updates the mr count when a new mr is assigned' do
+ create(:merge_request, source_project: project, author: user, assignees: [user])
+
+ get api('/user_counts', user)
+
+ expect(response.status).to eq(200)
+ expect(json_response).to be_a Hash
+ expect(json_response['merge_requests']).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/routing/api_routing_spec.rb b/spec/routing/api_routing_spec.rb
deleted file mode 100644
index 3c48ead4ff2..00000000000
--- a/spec/routing/api_routing_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-require 'spec_helper'
-
-describe 'api', 'routing' do
- context 'when graphql is disabled' do
- before do
- stub_feature_flags(graphql: false)
- end
-
- it 'does not route to the GraphqlController' do
- expect(post('/api/graphql')).not_to route_to('graphql#execute')
- end
- end
-
- context 'when graphql is enabled' do
- before do
- stub_feature_flags(graphql: true)
- end
-
- it 'routes to the GraphqlController' do
- expect(post('/api/graphql')).to route_to('graphql#execute')
- end
- end
-end
diff --git a/spec/routing/environments_spec.rb b/spec/routing/environments_spec.rb
index aacbe300966..28b3e79c1ff 100644
--- a/spec/routing/environments_spec.rb
+++ b/spec/routing/environments_spec.rb
@@ -9,7 +9,7 @@ describe 'environments routing' do
end
let(:environments_route) do
- "#{project.namespace.name}/#{project.name}/environments/"
+ "#{project.full_path}/environments/"
end
describe 'routing environment folders' do
@@ -36,13 +36,12 @@ describe 'environments routing' do
end
def get_folder(folder)
- get("#{project.namespace.name}/#{project.name}/" \
- "environments/folders/#{folder}")
+ get("#{project.full_path}/environments/folders/#{folder}")
end
def folder_action(**opts)
- options = { namespace_id: project.namespace.name,
- project_id: project.name }
+ options = { namespace_id: project.namespace.path,
+ project_id: project.path }
['projects/environments#folder', options.merge(opts)]
end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 6dde40d1cb6..8a3de2a52fc 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -713,4 +713,10 @@ describe 'project routing' do
end
end
end
+
+ describe Projects::DeployTokensController, 'routing' do
+ it 'routes to deploy_tokens#revoke' do
+ expect(put("/gitlab/gitlabhq/-/deploy_tokens/1/revoke")).to route_to("projects/deploy_tokens#revoke", namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ end
+ end
end
diff --git a/spec/rubocop/cop/gitlab/rails_logger_spec.rb b/spec/rubocop/cop/gitlab/rails_logger_spec.rb
new file mode 100644
index 00000000000..f0158ddcc5c
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/rails_logger_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../../rubocop/cop/gitlab/rails_logger'
+
+describe RuboCop::Cop::Gitlab::RailsLogger do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ it 'flags the use of Rails.logger.error with a constant receiver' do
+ inspect_source("Rails.logger.error('some error')")
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'flags the use of Rails.logger.info with a constant receiver' do
+ inspect_source("Rails.logger.info('some info')")
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'flags the use of Rails.logger.warn with a constant receiver' do
+ inspect_source("Rails.logger.warn('some warning')")
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'does not flag the use of a logger on a constant other than Rails' do
+ inspect_source("AppLogger.error('some error')")
+
+ expect(cop.offenses.size).to eq(0)
+ end
+
+ it 'does not flag the use of logger with a send receiver' do
+ inspect_source("file_logger.info('important info')")
+
+ expect(cop.offenses.size).to eq(0)
+ end
+end
diff --git a/spec/rubocop/cop/qa/element_with_pattern_spec.rb b/spec/rubocop/cop/qa/element_with_pattern_spec.rb
index ef20d9a1f26..fee390caa9f 100644
--- a/spec/rubocop/cop/qa/element_with_pattern_spec.rb
+++ b/spec/rubocop/cop/qa/element_with_pattern_spec.rb
@@ -23,9 +23,9 @@ describe RuboCop::Cop::QA::ElementWithPattern do
expect_offense(<<-RUBY)
view 'app/views/shared/groups/_search_form.html.haml' do
element :groups_filter, 'search_field_tag :filter'
- ^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't use a pattern for element, create a corresponding `qa-groups-filter` instead.
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't use a pattern for element, create a corresponding `data-qa-selector=groups_filter` instead.
element :groups_filter_placeholder, /Search by name/
- ^^^^^^^^^^^^^^ Don't use a pattern for element, create a corresponding `qa-groups-filter-placeholder` instead.
+ ^^^^^^^^^^^^^^ Don't use a pattern for element, create a corresponding `data-qa-selector=groups_filter_placeholder` instead.
end
RUBY
end
diff --git a/spec/serializers/environment_status_entity_spec.rb b/spec/serializers/environment_status_entity_spec.rb
index 8a6a38fe5f8..cb4749f019f 100644
--- a/spec/serializers/environment_status_entity_spec.rb
+++ b/spec/serializers/environment_status_entity_spec.rb
@@ -9,7 +9,7 @@ describe EnvironmentStatusEntity do
let(:project) { deployment.project }
let(:merge_request) { create(:merge_request, :deployed_review_app, deployment: deployment) }
- let(:environment_status) { EnvironmentStatus.new(environment, merge_request, merge_request.diff_head_sha) }
+ let(:environment_status) { EnvironmentStatus.new(project, environment, merge_request, merge_request.diff_head_sha) }
let(:entity) { described_class.new(environment_status, request: request) }
subject { entity.as_json }
@@ -55,8 +55,14 @@ describe EnvironmentStatusEntity do
before do
project.add_maintainer(user)
allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
- allow(prometheus_adapter).to receive(:query).with(:deployment, deployment).and_return(simple_metrics)
allow(entity).to receive(:deployment).and_return(deployment)
+
+ expect_next_instance_of(DeploymentMetrics) do |deployment_metrics|
+ allow(deployment_metrics).to receive(:prometheus_adapter).and_return(prometheus_adapter)
+
+ allow(prometheus_adapter).to receive(:query)
+ .with(:deployment, deployment).and_return(simple_metrics)
+ end
end
context 'when deployment succeeded' do
@@ -64,7 +70,7 @@ describe EnvironmentStatusEntity do
it 'returns metrics url' do
expect(subject[:metrics_url])
- .to eq("/#{project.namespace.name}/#{project.name}/environments/#{environment.id}/deployments/#{deployment.iid}/metrics")
+ .to eq("/#{project.full_path}/environments/#{environment.id}/deployments/#{deployment.iid}/metrics")
end
end
diff --git a/spec/services/audit_event_service_spec.rb b/spec/services/audit_event_service_spec.rb
index 32fd98e6ef9..e42bff607b2 100644
--- a/spec/services/audit_event_service_spec.rb
+++ b/spec/services/audit_event_service_spec.rb
@@ -10,11 +10,8 @@ describe AuditEventService do
let(:logger) { instance_double(Gitlab::AuditJsonLogger) }
describe '#security_event' do
- before do
- expect(service).to receive(:file_logger).and_return(logger)
- end
-
it 'creates an event and logs to a file' do
+ expect(service).to receive(:file_logger).and_return(logger)
expect(logger).to receive(:info).with(author_id: user.id,
entity_id: project.id,
entity_type: "Project",
@@ -22,5 +19,32 @@ describe AuditEventService do
expect { service.security_event }.to change(SecurityEvent, :count).by(1)
end
+
+ it 'formats from and to fields' do
+ service = described_class.new(
+ user, project,
+ {
+ from: true,
+ to: false,
+ action: :create,
+ target_id: 1
+ })
+ expect(service).to receive(:file_logger).and_return(logger)
+ expect(logger).to receive(:info).with(author_id: user.id,
+ entity_type: 'Project',
+ entity_id: project.id,
+ from: 'true',
+ to: 'false',
+ action: :create,
+ target_id: 1)
+
+ expect { service.security_event }.to change(SecurityEvent, :count).by(1)
+
+ details = SecurityEvent.last.details
+ expect(details[:from]).to be true
+ expect(details[:to]).to be false
+ expect(details[:action]).to eq(:create)
+ expect(details[:target_id]).to eq(1)
+ end
end
end
diff --git a/spec/services/boards/issues/move_service_spec.rb b/spec/services/boards/issues/move_service_spec.rb
index 16e2a2fba6b..1bfb5602df2 100644
--- a/spec/services/boards/issues/move_service_spec.rb
+++ b/spec/services/boards/issues/move_service_spec.rb
@@ -52,5 +52,91 @@ describe Boards::Issues::MoveService do
it_behaves_like 'issues move service', true
end
+
+ describe '#execute_multiple' do
+ set(:group) { create(:group) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project, namespace: group) }
+ set(:board1) { create(:board, group: group) }
+ set(:development) { create(:group_label, group: group, name: 'Development') }
+ set(:testing) { create(:group_label, group: group, name: 'Testing') }
+ set(:list1) { create(:list, board: board1, label: development, position: 0) }
+ set(:list2) { create(:list, board: board1, label: testing, position: 1) }
+ let(:params) { { board_id: board1.id, from_list_id: list1.id, to_list_id: list2.id } }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns false if the list of issues is empty' do
+ expect(described_class.new(group, user, params).execute_multiple([])).to eq(false)
+ end
+
+ context 'moving multiple issues' do
+ let(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+ let(:issue2) { create(:labeled_issue, project: project, labels: [development]) }
+
+ it 'moves multiple issues from one list to another' do
+ expect(described_class.new(group, user, params).execute_multiple([issue1, issue2])).to be_truthy
+
+ expect(issue1.labels).to eq([testing])
+ expect(issue2.labels).to eq([testing])
+ end
+ end
+
+ context 'moving a single issue' do
+ let(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+
+ it 'moves one issue' do
+ expect(described_class.new(group, user, params).execute_multiple([issue1])).to be_truthy
+
+ expect(issue1.labels).to eq([testing])
+ end
+ end
+
+ context 'moving issues visually after an existing issue' do
+ let(:existing_issue) { create(:labeled_issue, project: project, labels: [testing], relative_position: 10) }
+ let(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+ let(:issue2) { create(:labeled_issue, project: project, labels: [development]) }
+
+ let(:move_params) do
+ params.dup.tap do |hash|
+ hash[:move_before_id] = existing_issue.id
+ end
+ end
+
+ it 'moves the issues after the existing issue' do
+ expect(described_class.new(group, user, move_params).execute_multiple([issue1, issue2])).to be_truthy
+
+ expect(issue1.labels).to eq([testing])
+ expect(issue2.labels).to eq([testing])
+
+ expect(issue1.relative_position > existing_issue.relative_position).to eq(true)
+ expect(issue2.relative_position > issue1.relative_position).to eq(true)
+ end
+ end
+
+ context 'moving issues visually before an existing issue' do
+ let(:existing_issue) { create(:labeled_issue, project: project, labels: [testing], relative_position: 10) }
+ let(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+ let(:issue2) { create(:labeled_issue, project: project, labels: [development]) }
+
+ let(:move_params) do
+ params.dup.tap do |hash|
+ hash[:move_after_id] = existing_issue.id
+ end
+ end
+
+ it 'moves the issues before the existing issue' do
+ expect(described_class.new(group, user, move_params).execute_multiple([issue1, issue2])).to be_truthy
+
+ expect(issue1.labels).to eq([testing])
+ expect(issue2.labels).to eq([testing])
+
+ expect(issue2.relative_position < existing_issue.relative_position).to eq(true)
+ expect(issue1.relative_position < issue2.relative_position).to eq(true)
+ end
+ end
+ end
end
end
diff --git a/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb
index be052a07da7..44407ae2793 100644
--- a/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/gcp/kubernetes/create_or_update_namespace_service_spec.rb
@@ -34,6 +34,8 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService, '#execute' d
stub_kubeclient_create_service_account(api_url, namespace: namespace)
stub_kubeclient_create_secret(api_url, namespace: namespace)
stub_kubeclient_put_secret(api_url, "#{namespace}-token", namespace: namespace)
+ stub_kubeclient_put_role(api_url, Clusters::Gcp::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, Clusters::Gcp::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_get_secret(
api_url,
diff --git a/spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb
index 382b9043566..8b874989758 100644
--- a/spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/gcp/kubernetes/create_or_update_service_account_service_spec.rb
@@ -143,6 +143,8 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService do
stub_kubeclient_get_role_binding_error(api_url, role_binding_name, namespace: namespace)
stub_kubeclient_create_role_binding(api_url, namespace: namespace)
+ stub_kubeclient_put_role(api_url, Clusters::Gcp::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, Clusters::Gcp::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
end
it_behaves_like 'creates service account and token'
@@ -169,6 +171,24 @@ describe Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService do
)
)
end
+
+ it 'creates a role and role binding granting knative serving permissions to the service account' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{Clusters::Gcp::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME}").with(
+ body: hash_including(
+ metadata: {
+ name: Clusters::Gcp::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME,
+ namespace: namespace
+ },
+ rules: [{
+ apiGroups: %w(serving.knative.dev),
+ resources: %w(configurations configurationgenerations routes revisions revisionuids autoscalers services),
+ verbs: %w(get list create update delete patch watch)
+ }]
+ )
+ )
+ end
end
end
end
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index aebc5ba2874..b0bcd7a36ba 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -11,343 +11,371 @@ describe Issuable::BulkUpdateService do
.reverse_merge(issuable_ids: Array(issuables).map(&:id).join(','))
type = Array(issuables).first.model_name.param_key
- Issuable::BulkUpdateService.new(project, user, bulk_update_params).execute(type)
+ Issuable::BulkUpdateService.new(user, bulk_update_params).execute(type)
end
- describe 'close issues' do
- let(:issues) { create_list(:issue, 2, project: project) }
-
- it 'succeeds and returns the correct number of issues updated' do
- result = bulk_update(issues, state_event: 'close')
+ shared_examples 'updates milestones' do
+ it 'succeeds' do
+ result = bulk_update(issues, milestone_id: milestone.id)
expect(result[:success]).to be_truthy
expect(result[:count]).to eq(issues.count)
end
- it 'closes all the issues passed' do
- bulk_update(issues, state_event: 'close')
+ it 'updates the issues milestone' do
+ bulk_update(issues, milestone_id: milestone.id)
- expect(project.issues.opened).to be_empty
- expect(project.issues.closed).not_to be_empty
+ issues.each do |issue|
+ expect(issue.reload.milestone).to eq(milestone)
+ end
end
+ end
- context 'when issue for a different project is created' do
- let(:private_project) { create(:project, :private) }
- let(:issue) { create(:issue, project: private_project, author: user) }
+ context 'with project issues' do
+ describe 'close issues' do
+ let(:issues) { create_list(:issue, 2, project: project) }
- context 'when user has access to the project' do
- it 'closes all issues passed' do
- private_project.add_maintainer(user)
+ it 'succeeds and returns the correct number of issues updated' do
+ result = bulk_update(issues, state_event: 'close')
- bulk_update(issues + [issue], state_event: 'close')
+ expect(result[:success]).to be_truthy
+ expect(result[:count]).to eq(issues.count)
+ end
- expect(project.issues.opened).to be_empty
- expect(project.issues.closed).not_to be_empty
- expect(private_project.issues.closed).not_to be_empty
- end
+ it 'closes all the issues passed' do
+ bulk_update(issues, state_event: 'close')
+
+ expect(project.issues.opened).to be_empty
+ expect(project.issues.closed).not_to be_empty
end
- context 'when user does not have access to project' do
- it 'only closes all issues that the user has access to' do
- bulk_update(issues + [issue], state_event: 'close')
+ context 'when issue for a different project is created' do
+ let(:private_project) { create(:project, :private) }
+ let(:issue) { create(:issue, project: private_project, author: user) }
+
+ context 'when user has access to the project' do
+ it 'closes all issues passed' do
+ private_project.add_maintainer(user)
+
+ bulk_update(issues + [issue], state_event: 'close')
+
+ expect(project.issues.opened).to be_empty
+ expect(project.issues.closed).not_to be_empty
+ expect(private_project.issues.closed).not_to be_empty
+ end
+ end
+
+ context 'when user does not have access to project' do
+ it 'only closes all issues that the user has access to' do
+ bulk_update(issues + [issue], state_event: 'close')
- expect(project.issues.opened).to be_empty
- expect(project.issues.closed).not_to be_empty
- expect(private_project.issues.closed).to be_empty
+ expect(project.issues.opened).to be_empty
+ expect(project.issues.closed).not_to be_empty
+ expect(private_project.issues.closed).to be_empty
+ end
end
end
end
- end
- describe 'reopen issues' do
- let(:issues) { create_list(:closed_issue, 2, project: project) }
+ describe 'reopen issues' do
+ let(:issues) { create_list(:closed_issue, 2, project: project) }
- it 'succeeds and returns the correct number of issues updated' do
- result = bulk_update(issues, state_event: 'reopen')
+ it 'succeeds and returns the correct number of issues updated' do
+ result = bulk_update(issues, state_event: 'reopen')
- expect(result[:success]).to be_truthy
- expect(result[:count]).to eq(issues.count)
- end
+ expect(result[:success]).to be_truthy
+ expect(result[:count]).to eq(issues.count)
+ end
- it 'reopens all the issues passed' do
- bulk_update(issues, state_event: 'reopen')
+ it 'reopens all the issues passed' do
+ bulk_update(issues, state_event: 'reopen')
- expect(project.issues.closed).to be_empty
- expect(project.issues.opened).not_to be_empty
+ expect(project.issues.closed).to be_empty
+ expect(project.issues.opened).not_to be_empty
+ end
end
- end
- describe 'updating merge request assignee' do
- let(:merge_request) { create(:merge_request, target_project: project, source_project: project, assignees: [user]) }
+ describe 'updating merge request assignee' do
+ let(:merge_request) { create(:merge_request, target_project: project, source_project: project, assignees: [user]) }
- context 'when the new assignee ID is a valid user' do
- it 'succeeds' do
- new_assignee = create(:user)
- project.add_developer(new_assignee)
+ context 'when the new assignee ID is a valid user' do
+ it 'succeeds' do
+ new_assignee = create(:user)
+ project.add_developer(new_assignee)
- result = bulk_update(merge_request, assignee_ids: [user.id, new_assignee.id])
+ result = bulk_update(merge_request, assignee_ids: [user.id, new_assignee.id])
- expect(result[:success]).to be_truthy
- expect(result[:count]).to eq(1)
- end
+ expect(result[:success]).to be_truthy
+ expect(result[:count]).to eq(1)
+ end
- it 'updates the assignee to the user ID passed' do
- assignee = create(:user)
- project.add_developer(assignee)
+ it 'updates the assignee to the user ID passed' do
+ assignee = create(:user)
+ project.add_developer(assignee)
- expect { bulk_update(merge_request, assignee_ids: [assignee.id]) }
- .to change { merge_request.reload.assignee_ids }.from([user.id]).to([assignee.id])
+ expect { bulk_update(merge_request, assignee_ids: [assignee.id]) }
+ .to change { merge_request.reload.assignee_ids }.from([user.id]).to([assignee.id])
+ end
end
- end
- context "when the new assignee ID is #{IssuableFinder::NONE}" do
- it 'unassigns the issues' do
- expect { bulk_update(merge_request, assignee_ids: [IssuableFinder::NONE]) }
- .to change { merge_request.reload.assignee_ids }.to([])
+ context "when the new assignee ID is #{IssuableFinder::NONE}" do
+ it 'unassigns the issues' do
+ expect { bulk_update(merge_request, assignee_ids: [IssuableFinder::NONE]) }
+ .to change { merge_request.reload.assignee_ids }.to([])
+ end
end
- end
- context 'when the new assignee ID is not present' do
- it 'does not unassign' do
- expect { bulk_update(merge_request, assignee_ids: []) }
- .not_to change { merge_request.reload.assignee_ids }
+ context 'when the new assignee ID is not present' do
+ it 'does not unassign' do
+ expect { bulk_update(merge_request, assignee_ids: []) }
+ .not_to change { merge_request.reload.assignee_ids }
+ end
end
end
- end
- describe 'updating issue assignee' do
- let(:issue) { create(:issue, project: project, assignees: [user]) }
+ describe 'updating issue assignee' do
+ let(:issue) { create(:issue, project: project, assignees: [user]) }
- context 'when the new assignee ID is a valid user' do
- it 'succeeds' do
- new_assignee = create(:user)
- project.add_developer(new_assignee)
+ context 'when the new assignee ID is a valid user' do
+ it 'succeeds' do
+ new_assignee = create(:user)
+ project.add_developer(new_assignee)
- result = bulk_update(issue, assignee_ids: [new_assignee.id])
+ result = bulk_update(issue, assignee_ids: [new_assignee.id])
- expect(result[:success]).to be_truthy
- expect(result[:count]).to eq(1)
- end
+ expect(result[:success]).to be_truthy
+ expect(result[:count]).to eq(1)
+ end
- it 'updates the assignee to the user ID passed' do
- assignee = create(:user)
- project.add_developer(assignee)
- expect { bulk_update(issue, assignee_ids: [assignee.id]) }
- .to change { issue.reload.assignees.first }.from(user).to(assignee)
+ it 'updates the assignee to the user ID passed' do
+ assignee = create(:user)
+ project.add_developer(assignee)
+ expect { bulk_update(issue, assignee_ids: [assignee.id]) }
+ .to change { issue.reload.assignees.first }.from(user).to(assignee)
+ end
end
- end
- context "when the new assignee ID is #{IssuableFinder::NONE}" do
- it "unassigns the issues" do
- expect { bulk_update(issue, assignee_ids: [IssuableFinder::NONE.to_s]) }
- .to change { issue.reload.assignees.count }.from(1).to(0)
+ context "when the new assignee ID is #{IssuableFinder::NONE}" do
+ it "unassigns the issues" do
+ expect { bulk_update(issue, assignee_ids: [IssuableFinder::NONE.to_s]) }
+ .to change { issue.reload.assignees.count }.from(1).to(0)
+ end
end
- end
- context 'when the new assignee ID is not present' do
- it 'does not unassign' do
- expect { bulk_update(issue, assignee_ids: []) }
- .not_to change { issue.reload.assignees }
+ context 'when the new assignee ID is not present' do
+ it 'does not unassign' do
+ expect { bulk_update(issue, assignee_ids: []) }
+ .not_to change(issue.assignees, :count)
+ end
end
end
- end
-
- describe 'updating milestones' do
- let(:issue) { create(:issue, project: project) }
- let(:milestone) { create(:milestone, project: project) }
- it 'succeeds' do
- result = bulk_update(issue, milestone_id: milestone.id)
+ describe 'updating milestones' do
+ let(:issues) { [create(:issue, project: project)] }
+ let(:milestone) { create(:milestone, project: project) }
- expect(result[:success]).to be_truthy
- expect(result[:count]).to eq(1)
+ it_behaves_like 'updates milestones'
end
- it 'updates the issue milestone' do
- expect { bulk_update(issue, milestone_id: milestone.id) }
- .to change { issue.reload.milestone }.from(nil).to(milestone)
- end
- end
-
- describe 'updating labels' do
- def create_issue_with_labels(labels)
- create(:labeled_issue, project: project, labels: labels)
- end
+ describe 'updating labels' do
+ def create_issue_with_labels(labels)
+ create(:labeled_issue, project: project, labels: labels)
+ end
- let(:bug) { create(:label, project: project) }
- let(:regression) { create(:label, project: project) }
- let(:merge_requests) { create(:label, project: project) }
-
- let(:issue_all_labels) { create_issue_with_labels([bug, regression, merge_requests]) }
- let(:issue_bug_and_regression) { create_issue_with_labels([bug, regression]) }
- let(:issue_bug_and_merge_requests) { create_issue_with_labels([bug, merge_requests]) }
- let(:issue_no_labels) { create(:issue, project: project) }
- let(:issues) { [issue_all_labels, issue_bug_and_regression, issue_bug_and_merge_requests, issue_no_labels] }
-
- let(:labels) { [] }
- let(:add_labels) { [] }
- let(:remove_labels) { [] }
-
- let(:bulk_update_params) do
- {
- label_ids: labels.map(&:id),
- add_label_ids: add_labels.map(&:id),
- remove_label_ids: remove_labels.map(&:id)
- }
- end
+ let(:bug) { create(:label, project: project) }
+ let(:regression) { create(:label, project: project) }
+ let(:merge_requests) { create(:label, project: project) }
- before do
- bulk_update(issues, bulk_update_params)
- end
+ let(:issue_all_labels) { create_issue_with_labels([bug, regression, merge_requests]) }
+ let(:issue_bug_and_regression) { create_issue_with_labels([bug, regression]) }
+ let(:issue_bug_and_merge_requests) { create_issue_with_labels([bug, merge_requests]) }
+ let(:issue_no_labels) { create(:issue, project: project) }
+ let(:issues) { [issue_all_labels, issue_bug_and_regression, issue_bug_and_merge_requests, issue_no_labels] }
- context 'when label_ids are passed' do
- let(:issues) { [issue_all_labels, issue_no_labels] }
- let(:labels) { [bug, regression] }
+ let(:labels) { [] }
+ let(:add_labels) { [] }
+ let(:remove_labels) { [] }
- it 'updates the labels of all issues passed to the labels passed' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(match_array(labels.map(&:id)))
+ let(:bulk_update_params) do
+ {
+ label_ids: labels.map(&:id),
+ add_label_ids: add_labels.map(&:id),
+ remove_label_ids: remove_labels.map(&:id)
+ }
end
- it 'does not update issues not passed in' do
- expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ before do
+ bulk_update(issues, bulk_update_params)
end
- context 'when those label IDs are empty' do
- let(:labels) { [] }
+ context 'when label_ids are passed' do
+ let(:issues) { [issue_all_labels, issue_no_labels] }
+ let(:labels) { [bug, regression] }
- it 'updates the issues passed to have no labels' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(be_empty)
+ it 'updates the labels of all issues passed to the labels passed' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(match_array(labels.map(&:id)))
end
- end
- end
- context 'when add_label_ids are passed' do
- let(:issues) { [issue_all_labels, issue_bug_and_merge_requests, issue_no_labels] }
- let(:add_labels) { [bug, regression, merge_requests] }
+ it 'does not update issues not passed in' do
+ expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ end
- it 'adds those label IDs to all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(include(*add_labels.map(&:id)))
- end
+ context 'when those label IDs are empty' do
+ let(:labels) { [] }
- it 'does not update issues not passed in' do
- expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ it 'updates the issues passed to have no labels' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(be_empty)
+ end
+ end
end
- end
- context 'when remove_label_ids are passed' do
- let(:issues) { [issue_all_labels, issue_bug_and_merge_requests, issue_no_labels] }
- let(:remove_labels) { [bug, regression, merge_requests] }
+ context 'when add_label_ids are passed' do
+ let(:issues) { [issue_all_labels, issue_bug_and_merge_requests, issue_no_labels] }
+ let(:add_labels) { [bug, regression, merge_requests] }
- it 'removes those label IDs from all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(be_empty)
- end
+ it 'adds those label IDs to all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(include(*add_labels.map(&:id)))
+ end
- it 'does not update issues not passed in' do
- expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ it 'does not update issues not passed in' do
+ expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ end
end
- end
- context 'when add_label_ids and remove_label_ids are passed' do
- let(:issues) { [issue_all_labels, issue_bug_and_merge_requests, issue_no_labels] }
- let(:add_labels) { [bug] }
- let(:remove_labels) { [merge_requests] }
+ context 'when remove_label_ids are passed' do
+ let(:issues) { [issue_all_labels, issue_bug_and_merge_requests, issue_no_labels] }
+ let(:remove_labels) { [bug, regression, merge_requests] }
- it 'adds the label IDs to all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(include(bug.id))
- end
+ it 'removes those label IDs from all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(be_empty)
+ end
- it 'removes the label IDs from all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(merge_requests.id)
+ it 'does not update issues not passed in' do
+ expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ end
end
- it 'does not update issues not passed in' do
- expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
- end
- end
+ context 'when add_label_ids and remove_label_ids are passed' do
+ let(:issues) { [issue_all_labels, issue_bug_and_merge_requests, issue_no_labels] }
+ let(:add_labels) { [bug] }
+ let(:remove_labels) { [merge_requests] }
- context 'when add_label_ids and label_ids are passed' do
- let(:issues) { [issue_all_labels, issue_bug_and_regression, issue_bug_and_merge_requests] }
- let(:labels) { [merge_requests] }
- let(:add_labels) { [regression] }
+ it 'adds the label IDs to all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(include(bug.id))
+ end
- it 'adds the label IDs to all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(include(regression.id))
- end
+ it 'removes the label IDs from all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(merge_requests.id)
+ end
- it 'ignores the label IDs parameter' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(include(bug.id))
+ it 'does not update issues not passed in' do
+ expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ end
end
- it 'does not update issues not passed in' do
- expect(issue_no_labels.label_ids).to be_empty
- end
- end
+ context 'when add_label_ids and label_ids are passed' do
+ let(:issues) { [issue_all_labels, issue_bug_and_regression, issue_bug_and_merge_requests] }
+ let(:labels) { [merge_requests] }
+ let(:add_labels) { [regression] }
- context 'when remove_label_ids and label_ids are passed' do
- let(:issues) { [issue_no_labels, issue_bug_and_regression] }
- let(:labels) { [merge_requests] }
- let(:remove_labels) { [regression] }
+ it 'adds the label IDs to all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(include(regression.id))
+ end
- it 'removes the label IDs from all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(regression.id)
- end
+ it 'ignores the label IDs parameter' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(include(bug.id))
+ end
- it 'ignores the label IDs parameter' do
- expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(merge_requests.id)
+ it 'does not update issues not passed in' do
+ expect(issue_no_labels.label_ids).to be_empty
+ end
end
- it 'does not update issues not passed in' do
- expect(issue_all_labels.label_ids).to contain_exactly(bug.id, regression.id, merge_requests.id)
- end
- end
+ context 'when remove_label_ids and label_ids are passed' do
+ let(:issues) { [issue_no_labels, issue_bug_and_regression] }
+ let(:labels) { [merge_requests] }
+ let(:remove_labels) { [regression] }
- context 'when add_label_ids, remove_label_ids, and label_ids are passed' do
- let(:issues) { [issue_bug_and_merge_requests, issue_no_labels] }
- let(:labels) { [regression] }
- let(:add_labels) { [bug] }
- let(:remove_labels) { [merge_requests] }
+ it 'removes the label IDs from all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(regression.id)
+ end
- it 'adds the label IDs to all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids)).to all(include(bug.id))
- end
+ it 'ignores the label IDs parameter' do
+ expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(merge_requests.id)
+ end
- it 'removes the label IDs from all issues passed' do
- expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(merge_requests.id)
+ it 'does not update issues not passed in' do
+ expect(issue_all_labels.label_ids).to contain_exactly(bug.id, regression.id, merge_requests.id)
+ end
end
- it 'ignores the label IDs parameter' do
- expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(regression.id)
- end
+ context 'when add_label_ids, remove_label_ids, and label_ids are passed' do
+ let(:issues) { [issue_bug_and_merge_requests, issue_no_labels] }
+ let(:labels) { [regression] }
+ let(:add_labels) { [bug] }
+ let(:remove_labels) { [merge_requests] }
- it 'does not update issues not passed in' do
- expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ it 'adds the label IDs to all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids)).to all(include(bug.id))
+ end
+
+ it 'removes the label IDs from all issues passed' do
+ expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(merge_requests.id)
+ end
+
+ it 'ignores the label IDs parameter' do
+ expect(issues.map(&:reload).map(&:label_ids).flatten).not_to include(regression.id)
+ end
+
+ it 'does not update issues not passed in' do
+ expect(issue_bug_and_regression.label_ids).to contain_exactly(bug.id, regression.id)
+ end
end
end
- end
- describe 'subscribe to issues' do
- let(:issues) { create_list(:issue, 2, project: project) }
+ describe 'subscribe to issues' do
+ let(:issues) { create_list(:issue, 2, project: project) }
- it 'subscribes the given user' do
- bulk_update(issues, subscription_event: 'subscribe')
+ it 'subscribes the given user' do
+ bulk_update(issues, subscription_event: 'subscribe')
- expect(issues).to all(be_subscribed(user, project))
+ expect(issues).to all(be_subscribed(user, project))
+ end
end
- end
- describe 'unsubscribe from issues' do
- let(:issues) do
- create_list(:closed_issue, 2, project: project) do |issue|
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ describe 'unsubscribe from issues' do
+ let(:issues) do
+ create_list(:closed_issue, 2, project: project) do |issue|
+ issue.subscriptions.create(user: user, project: project, subscribed: true)
+ end
+ end
+
+ it 'unsubscribes the given user' do
+ bulk_update(issues, subscription_event: 'unsubscribe')
+
+ issues.each do |issue|
+ expect(issue).not_to be_subscribed(user, project)
+ end
end
end
+ end
- it 'unsubscribes the given user' do
- bulk_update(issues, subscription_event: 'unsubscribe')
+ context 'with group issues' do
+ let(:group) { create(:group) }
- issues.each do |issue|
- expect(issue).not_to be_subscribed(user, project)
+ context 'updating milestone' do
+ let(:milestone) { create(:milestone, group: group) }
+ let(:project1) { create(:project, :repository, group: group) }
+ let(:project2) { create(:project, :repository, group: group) }
+ let(:issue1) { create(:issue, project: project1) }
+ let(:issue2) { create(:issue, project: project2) }
+ let(:issues) { [issue1, issue2] }
+
+ before do
+ group.add_maintainer(user)
end
+
+ it_behaves_like 'updates milestones'
end
end
end
diff --git a/spec/services/issuable/clone/content_rewriter_spec.rb b/spec/services/issuable/clone/content_rewriter_spec.rb
index 230e1123280..3479c20862a 100644
--- a/spec/services/issuable/clone/content_rewriter_spec.rb
+++ b/spec/services/issuable/clone/content_rewriter_spec.rb
@@ -165,5 +165,18 @@ describe Issuable::Clone::ContentRewriter do
expect(note.note_html).not_to eq(new_note.note_html)
end
end
+
+ context "discussion notes" do
+ let(:note) { create(:note, noteable: original_issue, note: "sample note", project: project1) }
+ let!(:discussion) { create(:discussion_note_on_issue, in_reply_to: note, note: "reply to sample note") }
+
+ it 'rewrites discussion correctly' do
+ subject.execute
+
+ expect(new_issue.notes.count).to eq(original_issue.notes.count)
+ expect(new_issue.notes.where(discussion_id: discussion.discussion_id).count).to eq(0)
+ expect(original_issue.notes.where(discussion_id: discussion.discussion_id).count).to eq(1)
+ end
+ end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 28fa5d12d9c..468e7c286d5 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -480,6 +480,22 @@ describe Issues::UpdateService, :mailer do
update_issue(description: "- [x] Task 1\n- [X] Task 2")
end
+ it 'does not check for spam on task status change' do
+ params = {
+ update_task: {
+ index: 1,
+ checked: false,
+ line_source: '- [x] Task 1',
+ line_number: 1
+ }
+ }
+ service = described_class.new(project, user, params)
+
+ expect(service).not_to receive(:spam_check)
+
+ service.execute(issue)
+ end
+
it 'creates system note about task status change' do
note1 = find_note('marked the task **Task 1** as completed')
note2 = find_note('marked the task **Task 2** as completed')
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index 0933c6d4336..9e7a5260ca4 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -8,8 +8,8 @@ describe MergeRequests::GetUrlsService do
let(:project) { create(:project, :public, :repository) }
let(:service) { described_class.new(project) }
let(:source_branch) { "merge-test" }
- let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
- let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/#{merge_request.iid}" }
+ let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
+ let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/#{merge_request.iid}" }
let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
@@ -119,7 +119,7 @@ describe MergeRequests::GetUrlsService do
let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/markdown" }
let(:changes) { "#{new_branch_changes}\n#{existing_branch_changes}" }
- let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
+ let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
it 'returns 2 urls for both creating new and showing merge request' do
result = service.execute(changes)
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index aa759ac9edc..22578436c18 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -214,6 +214,19 @@ describe MergeRequests::MergeService do
allow(Rails.logger).to receive(:error)
end
+ context 'when source is missing' do
+ it 'logs and saves error' do
+ allow(merge_request).to receive(:diff_head_sha) { nil }
+
+ error_message = 'No source for merge'
+
+ service.execute(merge_request)
+
+ expect(merge_request.merge_error).to eq(error_message)
+ expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
+ end
+ end
+
it 'logs and saves error if there is an exception' do
error_message = 'error message'
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index e2f201677fa..758679edc45 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -191,6 +191,28 @@ describe MergeRequests::MergeToRefService do
it { expect(todo).not_to be_done }
end
+ context 'when source is missing' do
+ it 'returns error' do
+ allow(merge_request).to receive(:diff_head_sha) { nil }
+
+ error_message = 'No source for merge'
+
+ result = service.execute(merge_request)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq(error_message)
+ end
+ end
+
+ context 'when target ref is passed as a parameter' do
+ let(:params) { { commit_message: 'merge train', target_ref: target_ref } }
+
+ it_behaves_like 'successfully merges to ref with merge method' do
+ let(:first_parent_ref) { 'refs/heads/master' }
+ let(:target_ref) { 'refs/merge-requests/1/train' }
+ end
+ end
+
describe 'cascading merge refs' do
set(:project) { create(:project, :repository) }
let(:params) { { commit_message: 'Cascading merge', first_parent_ref: first_parent_ref, target_ref: target_ref } }
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 6efece64092..6e827f2ea5b 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -11,7 +11,11 @@ describe MergeRequests::MergeabilityCheckService do
end
it 'does not change the merge ref HEAD' do
- expect { subject }.not_to change(merge_request, :merge_ref_head)
+ merge_ref_head = merge_request.merge_ref_head
+
+ subject
+
+ expect(merge_request.reload.merge_ref_head).to eq merge_ref_head
end
it 'returns ServiceResponse.error' do
@@ -73,7 +77,7 @@ describe MergeRequests::MergeabilityCheckService do
it 'second call does not change the merge-ref' do
expect { subject }.to change(merge_request, :merge_ref_head).from(nil)
- expect { subject }.not_to change(merge_request, :merge_ref_head)
+ expect { subject }.not_to change(merge_request.merge_ref_head, :id)
end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index f566d235787..7e5837a4798 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -598,7 +598,7 @@ describe MergeRequests::UpdateService, :mailer do
feature_visibility_attr = :"#{merge_request.model_name.plural}_access_level"
project.project_feature.update_attribute(feature_visibility_attr, ProjectFeature::PRIVATE)
- expect { update_merge_request(assignee_ids: [assignee]) }.not_to change { merge_request.reload.assignees }
+ expect { update_merge_request(assignee_ids: [assignee]) }.not_to change(merge_request.assignees, :count)
end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index f25e2fe5e2b..3e3de051732 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -215,13 +215,14 @@ describe NotificationService, :mailer do
let(:project) { create(:project, :private) }
let(:issue) { create(:issue, project: project, assignees: [assignee]) }
let(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
- let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @unsubscribed_mentioned and @outsider also') }
+ let(:author) { create(:user) }
+ let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@mention referenced, @unsubscribed_mentioned and @outsider also') }
before do
- build_team(note.project)
+ build_team(project)
project.add_maintainer(issue.author)
project.add_maintainer(assignee)
- project.add_maintainer(note.author)
+ project.add_maintainer(author)
@u_custom_off = create_user_with_notification(:custom, 'custom_off')
project.add_guest(@u_custom_off)
@@ -240,7 +241,8 @@ describe NotificationService, :mailer do
describe '#new_note' do
it do
- add_users_with_subscription(note.project, issue)
+ add_users(project)
+ add_user_subscriptions(issue)
reset_delivered_emails!
expect(SentNotification).to receive(:record).with(issue, any_args).exactly(10).times
@@ -268,7 +270,8 @@ describe NotificationService, :mailer do
end
it "emails the note author if they've opted into notifications about their activity" do
- add_users_with_subscription(note.project, issue)
+ add_users(project)
+ add_user_subscriptions(issue)
reset_delivered_emails!
note.author.notified_of_own_activity = true
@@ -415,13 +418,15 @@ describe NotificationService, :mailer do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project, assignees: [assignee]) }
let(:mentioned_issue) { create(:issue, assignees: issue.assignees) }
- let(:note) { create(:note_on_issue, noteable: issue, project_id: issue.project_id, note: '@all mentioned') }
+ let(:author) { create(:user) }
+ let(:note) { create(:note_on_issue, author: author, noteable: issue, project_id: issue.project_id, note: '@all mentioned') }
before do
- build_team(note.project)
- build_group(note.project)
- note.project.add_maintainer(note.author)
- add_users_with_subscription(note.project, issue)
+ build_team(project)
+ build_group(project)
+ add_users(project)
+ add_user_subscriptions(issue)
+ project.add_maintainer(author)
reset_delivered_emails!
end
@@ -473,17 +478,18 @@ describe NotificationService, :mailer do
context 'project snippet note' do
let!(:project) { create(:project, :public) }
let(:snippet) { create(:project_snippet, project: project, author: create(:user)) }
- let(:note) { create(:note_on_project_snippet, noteable: snippet, project_id: project.id, note: '@all mentioned') }
+ let(:author) { create(:user) }
+ let(:note) { create(:note_on_project_snippet, author: author, noteable: snippet, project_id: project.id, note: '@all mentioned') }
before do
build_team(project)
build_group(project)
+ project.add_maintainer(author)
# make sure these users can read the project snippet!
project.add_guest(@u_guest_watcher)
project.add_guest(@u_guest_custom)
add_member_for_parent_group(@pg_watcher, project)
- note.project.add_maintainer(note.author)
reset_delivered_emails!
end
@@ -708,10 +714,11 @@ describe NotificationService, :mailer do
let(:issue) { create :issue, project: project, assignees: [assignee], description: 'cc @participant @unsubscribed_mentioned' }
before do
- build_team(issue.project)
- build_group(issue.project)
+ build_team(project)
+ build_group(project)
- add_users_with_subscription(issue.project, issue)
+ add_users(project)
+ add_user_subscriptions(issue)
reset_delivered_emails!
update_custom_notification(:new_issue, @u_guest_custom, resource: project)
update_custom_notification(:new_issue, @u_custom_global)
@@ -1281,13 +1288,16 @@ describe NotificationService, :mailer do
let(:project) { create(:project, :public, :repository, namespace: group) }
let(:another_project) { create(:project, :public, namespace: group) }
let(:assignee) { create(:user) }
- let(:merge_request) { create :merge_request, source_project: project, assignees: [assignee], description: 'cc @participant' }
+ let(:assignees) { Array.wrap(assignee) }
+ let(:author) { create(:user) }
+ let(:merge_request) { create :merge_request, author: author, source_project: project, assignees: assignees, description: 'cc @participant' }
before do
- project.add_maintainer(merge_request.author)
- merge_request.assignees.each { |assignee| project.add_maintainer(assignee) }
- build_team(merge_request.target_project)
- add_users_with_subscription(merge_request.target_project, merge_request)
+ project.add_maintainer(author)
+ assignees.each { |assignee| project.add_maintainer(assignee) }
+ build_team(project)
+ add_users(project)
+ add_user_subscriptions(merge_request)
update_custom_notification(:new_merge_request, @u_guest_custom, resource: project)
update_custom_notification(:new_merge_request, @u_custom_global)
reset_delivered_emails!
@@ -1834,7 +1844,7 @@ describe NotificationService, :mailer do
describe 'ProjectMember' do
let(:project) { create(:project) }
- set(:added_user) { create(:user) }
+ let(:added_user) { create(:user) }
describe '#new_access_request' do
context 'for a project in a user namespace' do
@@ -2417,7 +2427,7 @@ describe NotificationService, :mailer do
should_not_email(user, recipients: email_recipients)
end
- def add_users_with_subscription(project, issuable)
+ def add_users(project)
@subscriber = create :user
@unsubscriber = create :user
@unsubscribed_mentioned = create :user, username: 'unsubscribed_mentioned'
@@ -2429,7 +2439,9 @@ describe NotificationService, :mailer do
project.add_maintainer(@unsubscriber)
project.add_maintainer(@watcher_and_subscriber)
project.add_maintainer(@unsubscribed_mentioned)
+ end
+ def add_user_subscriptions(issuable)
issuable.subscriptions.create(user: @unsubscribed_mentioned, project: project, subscribed: false)
issuable.subscriptions.create(user: @subscriber, project: project, subscribed: true)
issuable.subscriptions.create(user: @subscribed_participant, project: project, subscribed: true)
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 8d43ce4f662..af79a42b611 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -12,6 +12,12 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
stub_lets_encrypt_settings
end
+ around do |example|
+ Sidekiq::Testing.fake! do
+ example.run
+ end
+ end
+
def expect_to_create_acme_challenge
expect(::PagesDomains::CreateAcmeOrderService).to receive(:new).with(pages_domain)
.and_wrap_original do |m, *args|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 62fdc039b5e..95e0d8858b9 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -47,7 +47,7 @@ Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
quality_level = Quality::TestLevel.new
RSpec.configure do |config|
- config.use_transactional_fixtures = false
+ config.use_transactional_fixtures = true
config.use_instantiated_fixtures = false
config.fixture_path = Rails.root
@@ -103,6 +103,7 @@ RSpec.configure do |config|
config.include RedisHelpers
config.include Rails.application.routes.url_helpers, type: :routing
config.include PolicyHelpers, type: :policy
+ config.include MemoryUsageHelper
if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing.
@@ -289,6 +290,16 @@ RSpec.configure do |config|
config.before(:each, :https_pages_disabled) do |_|
allow(Gitlab.config.pages).to receive(:external_https).and_return(false)
end
+
+ # We can't use an `around` hook here because the wrapping transaction
+ # is not yet opened at the time that is triggered
+ config.prepend_before do
+ Gitlab::Database.set_open_transactions_baseline
+ end
+
+ config.append_after do
+ Gitlab::Database.reset_open_transactions_baseline
+ end
end
# add simpler way to match asset paths containing digest strings
diff --git a/spec/support/database_cleaner.rb b/spec/support/database_cleaner.rb
index edd7de94203..f0dd6c52b74 100644
--- a/spec/support/database_cleaner.rb
+++ b/spec/support/database_cleaner.rb
@@ -26,31 +26,22 @@ RSpec.configure do |config|
end
config.append_after(:context) do
- DatabaseCleaner.clean_with(:deletion, cache_tables: false)
+ delete_from_all_tables!
end
- config.before do
- setup_database_cleaner
- DatabaseCleaner.strategy = :transaction
- end
+ config.around(:each, :delete) do |example|
+ self.class.use_transactional_tests = false
- config.before(:each, :js) do
- DatabaseCleaner.strategy = :deletion, { except: deletion_except_tables, cache_tables: false }
- end
+ example.run
- config.before(:each, :delete) do
- DatabaseCleaner.strategy = :deletion, { except: deletion_except_tables, cache_tables: false }
+ delete_from_all_tables!(except: deletion_except_tables)
end
- config.before(:each, :migration) do
- DatabaseCleaner.strategy = :deletion, { cache_tables: false }
- end
+ config.around(:each, :migration) do |example|
+ self.class.use_transactional_tests = false
- config.before do
- DatabaseCleaner.start
- end
+ example.run
- config.append_after do
- DatabaseCleaner.clean
+ delete_from_all_tables!
end
end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index c69fa322073..08622dff6d9 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -1,4 +1,8 @@
module DbCleaner
+ def delete_from_all_tables!(except: nil)
+ DatabaseCleaner.clean_with(:deletion, cache_tables: false, except: except)
+ end
+
def deletion_except_tables
[]
end
diff --git a/spec/support/features/rss_shared_examples.rb b/spec/support/features/rss_shared_examples.rb
index 0de92aedba5..02d310a9afa 100644
--- a/spec/support/features/rss_shared_examples.rb
+++ b/spec/support/features/rss_shared_examples.rb
@@ -6,7 +6,7 @@ end
shared_examples "it has an RSS button with current_user's feed token" do
it "shows the RSS button with current_user's feed token" do
- expect(page).to have_css("a:has(.fa-rss)[href*='feed_token=#{user.feed_token}']")
+ expect(page).to have_css("a:has(.fa-rss)[href*='feed_token=#{user.feed_token}'], .js-rss-button[href*='feed_token=#{user.feed_token}']")
end
end
@@ -18,6 +18,6 @@ end
shared_examples "it has an RSS button without a feed token" do
it "shows the RSS button without a feed token" do
- expect(page).to have_css("a:has(.fa-rss):not([href*='feed_token'])")
+ expect(page).to have_css("a:has(.fa-rss):not([href*='feed_token']), .js-rss-button:not([href*='feed_token'])")
end
end
diff --git a/spec/support/helpers/git_http_helpers.rb b/spec/support/helpers/git_http_helpers.rb
index cd49bb148f2..c83860d7b51 100644
--- a/spec/support/helpers/git_http_helpers.rb
+++ b/spec/support/helpers/git_http_helpers.rb
@@ -1,4 +1,8 @@
+require_relative 'workhorse_helpers'
+
module GitHttpHelpers
+ include WorkhorseHelpers
+
def clone_get(project, options = {})
get "/#{project}/info/refs", params: { service: 'git-upload-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 1a09d48f4cd..ec3c460cd37 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -57,7 +57,8 @@ module GraphqlHelpers
end
def variables_for_mutation(name, input)
- graphql_input = input.map { |name, value| [GraphqlHelpers.fieldnamerize(name), value] }.to_h
+ graphql_input = prepare_input_for_mutation(input)
+
result = { input_variable_name_for_mutation(name) => graphql_input }
# Avoid trying to serialize multipart data into JSON
@@ -68,6 +69,18 @@ module GraphqlHelpers
end
end
+ # Recursively convert a Hash with Ruby-style keys to GraphQL fieldname-style keys
+ #
+ # prepare_input_for_mutation({ 'my_key' => 1 })
+ # => { 'myKey' => 1 }
+ def prepare_input_for_mutation(input)
+ input.map do |name, value|
+ value = prepare_input_for_mutation(value) if value.is_a?(Hash)
+
+ [GraphqlHelpers.fieldnamerize(name), value]
+ end.to_h
+ end
+
def input_variable_name_for_mutation(mutation_name)
mutation_name = GraphqlHelpers.fieldnamerize(mutation_name)
mutation_field = GitlabSchema.mutation.fields[mutation_name]
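For illustration (the keys below are made up), the recursive conversion now also camelizes nested hashes:

prepare_input_for_mutation(
  'project_path' => 'group/project',
  'position'     => { 'base_sha' => 'abc123' }
)
# => { 'projectPath' => 'group/project', 'position' => { 'baseSha' => 'abc123' } }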
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 3c7bcba2b42..278264f3df5 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -199,6 +199,11 @@ module KubernetesHelpers
.to_return(kube_response({}))
end
+ def stub_kubeclient_put_role(api_url, name, namespace: 'default')
+ WebMock.stub_request(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{name}")
+ .to_return(kube_response({}))
+ end
+
def kube_v1_secret_body(**options)
{
"kind" => "SecretList",
diff --git a/spec/support/helpers/memory_usage_helper.rb b/spec/support/helpers/memory_usage_helper.rb
new file mode 100644
index 00000000000..984ea8cc571
--- /dev/null
+++ b/spec/support/helpers/memory_usage_helper.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module MemoryUsageHelper
+ extend ActiveSupport::Concern
+
+ def gather_memory_data(csv_path)
+ write_csv_entry(csv_path,
+ {
+ example_group_path: TestEnv.topmost_example_group[:location],
+ example_group_description: TestEnv.topmost_example_group[:description],
+ time: Time.current,
+ job_name: ENV['CI_JOB_NAME']
+ }.merge(get_memory_usage))
+ end
+
+ def write_csv_entry(path, entry)
+ CSV.open(path, "a", headers: entry.keys, write_headers: !File.exist?(path)) do |file|
+ file << entry.values
+ end
+ end
+
+ def get_memory_usage
+ output, status = Gitlab::Popen.popen(%w(free -m))
+ abort "`free -m` return code is #{status}: #{output}" unless status.zero?
+
+ result = output.split("\n")[1].split(" ")[1..-1]
+ attrs = %i(m_total m_used m_free m_shared m_buffers_cache m_available).freeze
+
+ attrs.zip(result).to_h
+ end
+
+ included do |config|
+ config.after(:all) do
+ gather_memory_data(ENV['MEMORY_TEST_PATH']) if ENV['MEMORY_TEST_PATH']
+ end
+ end
+end
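To illustrate the parsing in get_memory_usage: the second line of `free -m` output is split into columns and zipped with the attribute names (the numbers below are made up):

row = "Mem:  7977  3093  1317  729  3566  3905".split(" ")[1..-1]
%i(m_total m_used m_free m_shared m_buffers_cache m_available).zip(row).to_h
# => { m_total: "7977", m_used: "3093", m_free: "1317",
#      m_shared: "729", m_buffers_cache: "3566", m_available: "3905" }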
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index cc1a28cb264..272b24f7541 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -18,8 +18,12 @@ module MigrationsHelpers
ActiveRecord::Migrator.migrations_paths
end
+ def migration_context
+ ActiveRecord::MigrationContext.new(migrations_paths)
+ end
+
def migrations
- ActiveRecord::Migrator.migrations(migrations_paths)
+ migration_context.migrations
end
def clear_schema_cache!
@@ -96,8 +100,7 @@ module MigrationsHelpers
def schema_migrate_down!
disable_migrations_output do
- ActiveRecord::Migrator.migrate(migrations_paths,
- migration_schema_version)
+ migration_context.down(migration_schema_version)
end
reset_column_in_all_models
@@ -107,7 +110,7 @@ module MigrationsHelpers
reset_column_in_all_models
disable_migrations_output do
- ActiveRecord::Migrator.migrate(migrations_paths)
+ migration_context.up
end
reset_column_in_all_models
@@ -123,7 +126,7 @@ module MigrationsHelpers
end
def migrate!
- ActiveRecord::Migrator.up(migrations_paths) do |migration|
+ migration_context.up do |migration|
migration.name == described_class.name
end
end
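A hypothetical migration spec built on these helpers (the migration class name and assertion are made up); `migrate!` now runs only the described migration through the new migration_context:

require 'spec_helper'

describe FixSomethingInBackground, :migration do
  it 'runs the described migration' do
    migrate!

    # ... assert on the migrated rows ...
  end
end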
diff --git a/spec/support/helpers/reactive_caching_helpers.rb b/spec/support/helpers/reactive_caching_helpers.rb
index b76b53db0b9..528da37e8cf 100644
--- a/spec/support/helpers/reactive_caching_helpers.rb
+++ b/spec/support/helpers/reactive_caching_helpers.rb
@@ -10,7 +10,7 @@ module ReactiveCachingHelpers
def stub_reactive_cache(subject = nil, data = nil, *qualifiers)
allow(ReactiveCachingWorker).to receive(:perform_async)
allow(ReactiveCachingWorker).to receive(:perform_in)
- write_reactive_cache(subject, data, *qualifiers) unless data.nil?
+ write_reactive_cache(subject, data, *qualifiers) unless subject.nil?
end
def synchronous_reactive_cache(subject)
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index e63099d89b7..893b10ea752 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -2,6 +2,7 @@ require 'rspec/mocks'
require 'toml-rb'
module TestEnv
+ extend ActiveSupport::Concern
extend self
ComponentFailedToInstallError = Class.new(StandardError)
@@ -108,6 +109,12 @@ module TestEnv
setup_forked_repo
end
+ included do |config|
+ config.append_before do
+ set_current_example_group
+ end
+ end
+
def disable_mailer
allow_any_instance_of(NotificationService).to receive(:mailer)
.and_return(double.as_null_object)
@@ -297,8 +304,23 @@ module TestEnv
FileUtils.rm_rf(path)
end
+ def current_example_group
+ Thread.current[:current_example_group]
+ end
+
+ # Returns the metadata of the top-level `describe` (the example group with no parent)
+ def topmost_example_group
+ example_group = current_example_group
+ example_group = example_group[:parent_example_group] until example_group[:parent_example_group].nil?
+ example_group
+ end
+
private
+ def set_current_example_group
+ Thread.current[:current_example_group] = ::RSpec.current_example.metadata[:example_group]
+ end
+
# These are directories that should be preserved at cleanup time
def test_dirs
@test_dirs ||= %w[
diff --git a/spec/support/matchers/abort_matcher.rb b/spec/support/matchers/abort_matcher.rb
new file mode 100644
index 00000000000..ce1dd140210
--- /dev/null
+++ b/spec/support/matchers/abort_matcher.rb
@@ -0,0 +1,46 @@
+RSpec::Matchers.define :abort_execution do
+ match do |code_block|
+ @captured_stderr = StringIO.new
+ original_stderr = $stderr
+ $stderr = @captured_stderr
+
+ code_block.call
+
+ false
+ rescue SystemExit => e
+ captured = @captured_stderr.string.chomp
+ @actual_exit_code = e.status
+ break false unless e.status == 1
+
+ if @message
+ if @message.is_a? String
+ @message == captured
+ elsif @message.is_a? Regexp
+ @message.match?(captured)
+ else
+ raise ArgumentError, 'with_message must be either a String or a Regular Expression'
+ end
+ else
+ true
+ end
+
+ ensure
+ $stderr = original_stderr
+ end
+
+ chain :with_message do |message|
+ @message = message
+ end
+
+ failure_message do |block|
+ unless @actual_exit_code
+ break "expected #{block} to abort with '#{@message}' but didnt call abort."
+ end
+
+ if @actual_exit_code != 1
+ break "expected #{block} to abort with: '#{@message}' but exited with success instead."
+ end
+
+ "expected #{block} to abort with: '#{@message}' \n but received: '#{@captured_stderr.string.chomp}' instead."
+ end
+
+ supports_block_expectations
+end
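Usage mirrors the storage rake specs further down: the block is expected to call Kernel#abort with a matching message.

expect { run_rake_task(task) }
  .to abort_execution.with_message(/This task requires database write access/)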
diff --git a/spec/support/shared_contexts/email_shared_context.rb b/spec/support/shared_contexts/email_shared_context.rb
index 9d806fc524d..4f5d53f9317 100644
--- a/spec/support/shared_contexts/email_shared_context.rb
+++ b/spec/support/shared_contexts/email_shared_context.rb
@@ -1,5 +1,3 @@
-require 'gitlab/email/receiver'
-
shared_context :email_shared_context do
let(:mail_key) { "59d8df8370b7e95c5a49fbf86aeb2c93" }
let(:receiver) { Gitlab::Email::Receiver.new(email_raw) }
diff --git a/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb b/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
new file mode 100644
index 00000000000..f2e1a95345b
--- /dev/null
+++ b/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a Note mutation that does not create a Note' do
+ it do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.not_to change { Note.count }
+ end
+end
+
+RSpec.shared_examples 'a Note mutation that creates a Note' do
+ it do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change { Note.count }.by(1)
+ end
+end
+
+RSpec.shared_examples 'a Note mutation when the user does not have permission' do
+ it_behaves_like 'a Note mutation that does not create a Note'
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+end
+
+RSpec.shared_examples 'a Note mutation when there are active record validation errors' do |model: Note|
+ before do
+ expect_next_instance_of(model) do |note|
+ expect(note).to receive(:valid?).at_least(:once).and_return(false)
+ expect(note).to receive_message_chain(
+ :errors,
+ :full_messages
+ ).and_return(['Error 1', 'Error 2'])
+ end
+ end
+
+ it_behaves_like 'a Note mutation that does not create a Note'
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: ['Error 1', 'Error 2']
+
+ it 'returns an empty Note' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('note')
+ expect(mutation_response['note']).to be_nil
+ end
+end
+
+RSpec.shared_examples 'a Note mutation when the given resource id is not for a Noteable' do
+ let(:noteable) { create(:label, project: project) }
+
+ it_behaves_like 'a Note mutation that does not create a Note'
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: ['Cannot add notes to this resource']
+end
+
+RSpec.shared_examples 'a Note mutation when the given resource id is not for a Note' do
+ let(:note) { create(:issue) }
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: ['Resource is not a note']
+end
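A hypothetical request spec wiring these shared examples up for a note-creating mutation; `mutation`, `current_user` and `mutation_response` are assumed to be defined by the including spec:

describe 'creating a note via GraphQL' do
  context 'when the user lacks permission' do
    it_behaves_like 'a Note mutation when the user does not have permission'
  end

  context 'when the note fails validation' do
    it_behaves_like 'a Note mutation when there are active record validation errors'
  end
end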
diff --git a/spec/support/shared_examples/mentionable_shared_examples.rb b/spec/support/shared_examples/mentionable_shared_examples.rb
index 1226841f24c..fea52c2eeb2 100644
--- a/spec/support/shared_examples/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/mentionable_shared_examples.rb
@@ -76,6 +76,30 @@ shared_examples 'a mentionable' do
expect(refs).to include(ext_commit)
end
+ context 'when there are cached markdown fields' do
+ before do
+ if subject.is_a?(CacheMarkdownField)
+ subject.refresh_markdown_cache
+ end
+ end
+
+ it 'sends in cached markdown fields when appropriate' do
+ if subject.is_a?(CacheMarkdownField)
+ expect_next_instance_of(Gitlab::ReferenceExtractor) do |ext|
+ attrs = subject.class.mentionable_attrs.collect(&:first) & subject.cached_markdown_fields.markdown_fields
+ attrs.each do |field|
+ expect(ext).to receive(:analyze).with(subject.send(field), hash_including(rendered: anything))
+ end
+ end
+
+ expect(subject).not_to receive(:refresh_markdown_cache)
+ expect(subject).to receive(:cached_markdown_fields).at_least(:once).and_call_original
+
+ subject.all_references(author)
+ end
+ end
+ end
+
it 'creates cross-reference notes' do
mentioned_objects = [mentioned_issue, mentioned_mr, mentioned_commit,
ext_issue, ext_mr, ext_commit]
@@ -98,6 +122,33 @@ shared_examples 'an editable mentionable' do
[create(:issue, project: project), create(:issue, project: ext_proj)]
end
+ context 'when there are cached markdown fields' do
+ before do
+ if subject.is_a?(CacheMarkdownField)
+ subject.refresh_markdown_cache
+ end
+ end
+
+ it 'refreshes markdown cache if necessary' do
+ subject.save!
+
+ set_mentionable_text.call('This is a text')
+
+ if subject.is_a?(CacheMarkdownField)
+ expect_next_instance_of(Gitlab::ReferenceExtractor) do |ext|
+ subject.cached_markdown_fields.markdown_fields.each do |field|
+ expect(ext).to receive(:analyze).with(subject.send(field), hash_including(rendered: anything))
+ end
+ end
+
+ expect(subject).to receive(:refresh_markdown_cache)
+ expect(subject).to receive(:cached_markdown_fields).at_least(:once).and_call_original
+
+ subject.all_references(author)
+ end
+ end
+ end
+
it 'creates new cross-reference notes when the mentionable text is edited' do
subject.save
subject.create_cross_references!
diff --git a/spec/support/shared_examples/policies/clusterable_shared_examples.rb b/spec/support/shared_examples/policies/clusterable_shared_examples.rb
index d99f94c76c3..4f9873d53e4 100644
--- a/spec/support/shared_examples/policies/clusterable_shared_examples.rb
+++ b/spec/support/shared_examples/policies/clusterable_shared_examples.rb
@@ -24,14 +24,6 @@ shared_examples 'clusterable policies' do
context 'with no clusters' do
it { expect_allowed(:add_cluster) }
end
-
- context 'with an existing cluster' do
- before do
- cluster
- end
-
- it { expect_disallowed(:add_cluster) }
- end
end
end
end
diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb
index 4b04d9cec39..0e47408fc72 100644
--- a/spec/tasks/gitlab/storage_rake_spec.rb
+++ b/spec/tasks/gitlab/storage_rake_spec.rb
@@ -50,7 +50,7 @@ describe 'rake gitlab:storage:*', :sidekiq do
expect(Project).not_to receive(:with_unmigrated_storage)
- expect { run_rake_task(task) }.to output(/This task requires database write access. Exiting./).to_stderr
+ expect { run_rake_task(task) }.to abort_execution.with_message(/This task requires database write access. Exiting./)
end
end
end
@@ -96,7 +96,7 @@ describe 'rake gitlab:storage:*', :sidekiq do
expect(Project).not_to receive(:with_unmigrated_storage)
- expect { run_rake_task(task) }.to output(/There is already a rollback operation in progress/).to_stderr
+ expect { run_rake_task(task) }.to abort_execution.with_message(/There is already a rollback operation in progress/)
end
end
end
@@ -105,14 +105,23 @@ describe 'rake gitlab:storage:*', :sidekiq do
it 'does nothing' do
expect(::HashedStorage::MigratorWorker).not_to receive(:perform_async)
- run_rake_task(task)
+ expect { run_rake_task(task) }.to abort_execution.with_message('There are no projects requiring storage migration. Nothing to do!')
end
end
context 'with 3 legacy projects' do
let(:projects) { create_list(:project, 3, :legacy_storage) }
- it_behaves_like "handles custom BATCH env var", ::HashedStorage::MigratorWorker
+ it 'enqueues migrations and counts projects correctly' do
+ projects.map(&:id).sort.tap do |ids|
+ stub_env('ID_FROM', ids[0])
+ stub_env('ID_TO', ids[1])
+ end
+
+ expect { run_rake_task(task) }.to output(/Enqueuing migration of 2 projects in batches/).to_stdout
+ end
+
+ it_behaves_like 'handles custom BATCH env var', ::HashedStorage::MigratorWorker
end
context 'with same id in range' do
@@ -120,7 +129,7 @@ describe 'rake gitlab:storage:*', :sidekiq do
stub_env('ID_FROM', 99999)
stub_env('ID_TO', 99999)
- expect { run_rake_task(task) }.to output(/There are no projects requiring storage migration with ID=99999/).to_stderr
+ expect { run_rake_task(task) }.to abort_execution.with_message(/There are no projects requiring storage migration with ID=99999/)
end
it 'displays a message when project exists but its already migrated' do
@@ -128,7 +137,7 @@ describe 'rake gitlab:storage:*', :sidekiq do
stub_env('ID_FROM', project.id)
stub_env('ID_TO', project.id)
- expect { run_rake_task(task) }.to output(/There are no projects requiring storage migration with ID=#{project.id}/).to_stderr
+ expect { run_rake_task(task) }.to abort_execution.with_message(/There are no projects requiring storage migration with ID=#{project.id}/)
end
it 'enqueues migration when project can be found' do
@@ -153,7 +162,7 @@ describe 'rake gitlab:storage:*', :sidekiq do
expect(Project).not_to receive(:with_unmigrated_storage)
- expect { run_rake_task(task) }.to output(/There is already a migration operation in progress/).to_stderr
+ expect { run_rake_task(task) }.to abort_execution.with_message(/There is already a migration operation in progress/)
end
end
end
@@ -162,13 +171,22 @@ describe 'rake gitlab:storage:*', :sidekiq do
it 'does nothing' do
expect(::HashedStorage::RollbackerWorker).not_to receive(:perform_async)
- run_rake_task(task)
+ expect { run_rake_task(task) }.to abort_execution.with_message('There are no projects that can have storage rolledback. Nothing to do!')
end
end
context 'with 3 hashed projects' do
let(:projects) { create_list(:project, 3) }
+ it 'enqueues migrations and counts projects correctly' do
+ projects.map(&:id).sort.tap do |ids|
+ stub_env('ID_FROM', ids[0])
+ stub_env('ID_TO', ids[1])
+ end
+
+ expect { run_rake_task(task) }.to output(/Enqueuing rollback of 2 projects in batches/).to_stdout
+ end
+
it_behaves_like "handles custom BATCH env var", ::HashedStorage::RollbackerWorker
end
end
diff --git a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
index 623237b111a..bf633a118ca 100644
--- a/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.html.haml_spec.rb
@@ -28,7 +28,7 @@ describe 'notify/pipeline_failed_email.html.haml' do
expect(rendered).to have_content "Your pipeline has failed"
expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub!(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
expect(rendered).to have_content pipeline.commit.author_name
expect(rendered).to have_content "##{pipeline.id}"
expect(rendered).to have_content pipeline.user.name
@@ -45,7 +45,7 @@ describe 'notify/pipeline_failed_email.html.haml' do
expect(rendered).to have_content "Your pipeline has failed"
expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub!(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
expect(rendered).to have_content pipeline.commit.author_name
expect(rendered).to have_content "##{pipeline.id}"
expect(rendered).to have_content "by API"
diff --git a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
index 81245239eba..060274eb56a 100644
--- a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
+++ b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb
@@ -30,7 +30,7 @@ describe 'notify/pipeline_failed_email.text.erb' do
expect(rendered).to have_content('Your pipeline has failed')
expect(rendered).to have_content(pipeline.project.name)
- expect(rendered).to have_content(pipeline.git_commit_message.truncate(50).gsub!(/\s+/, ' '))
+ expect(rendered).to have_content(pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' '))
expect(rendered).to have_content(pipeline.commit.author_name)
expect(rendered).to have_content("##{pipeline.id}")
expect(rendered).to have_content(pipeline.user.name)
diff --git a/spec/views/notify/pipeline_success_email.html.haml_spec.rb b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
index a876bf13e11..46a6c908863 100644
--- a/spec/views/notify/pipeline_success_email.html.haml_spec.rb
+++ b/spec/views/notify/pipeline_success_email.html.haml_spec.rb
@@ -28,7 +28,7 @@ describe 'notify/pipeline_success_email.html.haml' do
expect(rendered).to have_content "Your pipeline has passed"
expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub!(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
expect(rendered).to have_content pipeline.commit.author_name
expect(rendered).to have_content "##{pipeline.id}"
expect(rendered).to have_content pipeline.user.name
@@ -45,7 +45,7 @@ describe 'notify/pipeline_success_email.html.haml' do
expect(rendered).to have_content "Your pipeline has passed"
expect(rendered).to have_content pipeline.project.name
- expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub!(/\s+/, ' ')
+ expect(rendered).to have_content pipeline.git_commit_message.truncate(50).gsub(/\s+/, ' ')
expect(rendered).to have_content pipeline.commit.author_name
expect(rendered).to have_content "##{pipeline.id}"
expect(rendered).to have_content "by API"
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index 51afb076da1..edc55920b8e 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -36,6 +36,11 @@ describe ProjectCacheWorker do
end
context 'with an existing project' do
+ before do
+ lease_key = "namespace:namespaces_root_statistics:#{project.namespace_id}"
+ stub_exclusive_lease_taken(lease_key, timeout: Namespace::AggregationSchedule::DEFAULT_LEASE_TIMEOUT)
+ end
+
it 'refreshes the method caches' do
expect_any_instance_of(Repository).to receive(:refresh_method_caches)
.with(%i(readme))
@@ -81,6 +86,10 @@ describe ProjectCacheWorker do
expect(UpdateProjectStatisticsWorker).not_to receive(:perform_in)
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+ .with(project.namespace_id)
+
worker.update_statistics(project, statistics)
end
end
@@ -98,6 +107,11 @@ describe ProjectCacheWorker do
.with(lease_timeout, project.id, statistics)
.and_call_original
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async)
+ .with(project.namespace_id)
+ .twice
+
worker.update_statistics(project, statistics)
end
end