Diffstat (limited to 'spec')
-rw-r--r-- spec/controllers/admin/integrations_controller_spec.rb | 9
-rw-r--r-- spec/controllers/admin/runners_controller_spec.rb | 24
-rw-r--r-- spec/controllers/admin/users_controller_spec.rb | 11
-rw-r--r-- spec/controllers/groups/settings/integrations_controller_spec.rb | 7
-rw-r--r-- spec/controllers/projects/issues_controller_spec.rb | 39
-rw-r--r-- spec/controllers/projects/merge_requests_controller_spec.rb | 4
-rw-r--r-- spec/controllers/repositories/git_http_controller_spec.rb | 38
-rw-r--r-- spec/factories/ci/bridge.rb | 16
-rw-r--r-- spec/factories/ci/job_artifacts.rb | 2
-rw-r--r-- spec/factories/clusters/applications/helm.rb | 9
-rw-r--r-- spec/factories/diff_position.rb | 10
-rw-r--r-- spec/factories/import_failures.rb | 23
-rw-r--r-- spec/factories/projects.rb | 4
-rw-r--r-- spec/factories/terraform/state.rb | 4
-rw-r--r-- spec/factories/users.rb | 9
-rw-r--r-- spec/features/admin/admin_mode/workers_spec.rb | 8
-rw-r--r-- spec/features/admin/admin_settings_spec.rb | 15
-rw-r--r-- spec/features/cycle_analytics_spec.rb | 12
-rw-r--r-- spec/features/issues/csv_spec.rb | 100
-rw-r--r-- spec/features/merge_request/user_resolves_wip_mr_spec.rb | 48
-rw-r--r-- spec/features/projects/environments_pod_logs_spec.rb | 4
-rw-r--r-- spec/features/projects/snippets/create_snippet_spec.rb | 1
-rw-r--r-- spec/features/projects/snippets/user_updates_snippet_spec.rb | 1
-rw-r--r-- spec/features/snippets/spam_snippets_spec.rb | 1
-rw-r--r-- spec/features/snippets/user_creates_snippet_spec.rb | 1
-rw-r--r-- spec/features/snippets/user_edits_snippet_spec.rb | 1
-rw-r--r-- spec/features/static_site_editor_spec.rb | 19
-rw-r--r-- spec/finders/autocomplete/move_to_project_finder_spec.rb | 13
-rw-r--r-- spec/finders/metrics/dashboards/annotations_finder_spec.rb | 107
-rw-r--r-- spec/fixtures/api/schemas/cluster_status.json | 3
-rw-r--r-- spec/fixtures/lib/elasticsearch/pods_query.json | 28
-rw-r--r-- spec/fixtures/lib/elasticsearch/pods_response.json | 75
-rw-r--r-- spec/frontend/blob/components/blob_edit_content_spec.js | 2
-rw-r--r-- spec/frontend/ci_variable_list/components/ci_key_field_spec.js | 244
-rw-r--r-- spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js | 153
-rw-r--r-- spec/frontend/clusters/services/application_state_machine_spec.js | 16
-rw-r--r-- spec/frontend/diffs/components/commit_item_spec.js | 4
-rw-r--r-- spec/frontend/diffs/components/diff_table_cell_spec.js | 15
-rw-r--r-- spec/frontend/diffs/store/actions_spec.js | 1
-rw-r--r-- spec/frontend/diffs/store/getters_versions_dropdowns_spec.js | 99
-rw-r--r-- spec/frontend/diffs/store/mutations_spec.js | 67
-rw-r--r-- spec/frontend/fixtures/merge_requests_diffs.rb | 7
-rw-r--r-- spec/frontend/helpers/dom_events_helper.js | 10
-rw-r--r-- spec/frontend/jira_import/components/jira_import_app_spec.js | 207
-rw-r--r-- spec/frontend/jira_import/components/jira_import_form_spec.js | 136
-rw-r--r-- spec/frontend/jira_import/components/jira_import_progress_spec.js | 70
-rw-r--r-- spec/frontend/jira_import/components/jira_import_setup_spec.js | 17
-rw-r--r-- spec/frontend/jira_import/utils_spec.js | 27
-rw-r--r-- spec/frontend/logs/mock_data.js | 76
-rw-r--r-- spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap | 169
-rw-r--r-- spec/frontend/monitoring/components/charts/annotations_spec.js | 11
-rw-r--r-- spec/frontend/monitoring/components/charts/options_spec.js | 29
-rw-r--r-- spec/frontend/monitoring/components/charts/time_series_spec.js | 55
-rw-r--r-- spec/frontend/monitoring/components/dashboard_spec.js | 132
-rw-r--r-- spec/frontend/monitoring/components/dashboard_template_spec.js | 2
-rw-r--r-- spec/frontend/monitoring/components/dashboard_url_time_spec.js | 3
-rw-r--r-- spec/frontend/monitoring/components/panel_type_spec.js | 93
-rw-r--r-- spec/frontend/monitoring/fixture_data.js | 49
-rw-r--r-- spec/frontend/monitoring/init_utils.js | 57
-rw-r--r-- spec/frontend/monitoring/mock_data.js | 348
-rw-r--r-- spec/frontend/monitoring/store/actions_spec.js | 75
-rw-r--r-- spec/frontend/monitoring/store/getters_spec.js | 90
-rw-r--r-- spec/frontend/monitoring/store/mutations_spec.js | 7
-rw-r--r-- spec/frontend/monitoring/store/utils_spec.js | 31
-rw-r--r-- spec/frontend/monitoring/store_utils.js | 34
-rw-r--r-- spec/frontend/monitoring/utils_spec.js | 11
-rw-r--r-- spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js | 31
-rw-r--r-- spec/frontend/pipelines/graph/action_component_spec.js | 9
-rw-r--r-- spec/frontend/pipelines/graph/graph_component_spec.js | 305
-rw-r--r-- spec/frontend/pipelines/graph/job_group_dropdown_spec.js (renamed from spec/javascripts/pipelines/graph/job_group_dropdown_spec.js) | 19
-rw-r--r-- spec/frontend/pipelines/graph/job_item_spec.js | 8
-rw-r--r-- spec/frontend/pipelines/graph/job_name_component_spec.js | 36
-rw-r--r-- spec/frontend/pipelines/graph/linked_pipeline_spec.js | 24
-rw-r--r-- spec/frontend/pipelines/graph/linked_pipelines_column_spec.js | 38
-rw-r--r-- spec/frontend/pipelines/graph/linked_pipelines_mock_data.js | 4084
-rw-r--r-- spec/frontend/pipelines/graph/mock_data.js (renamed from spec/javascripts/pipelines/graph/mock_data.js) | 0
-rw-r--r-- spec/frontend/pipelines/graph/stage_column_component_spec.js | 136
-rw-r--r-- spec/frontend/registry/explorer/pages/list_spec.js | 49
-rw-r--r-- spec/frontend/registry/explorer/stores/actions_spec.js | 29
-rw-r--r-- spec/frontend/registry/explorer/stores/mutations_spec.js | 22
-rw-r--r-- spec/frontend/repository/router_spec.js | 17
-rw-r--r-- spec/frontend/sidebar/sidebar_assignees_spec.js | 74
-rw-r--r-- spec/frontend/snippet/snippet_edit_spec.js | 45
-rw-r--r-- spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap | 1
-rw-r--r-- spec/frontend/snippets/components/edit_spec.js | 279
-rw-r--r-- spec/frontend/snippets/components/snippet_header_spec.js | 10
-rw-r--r-- spec/frontend/static_site_editor/components/invalid_content_message_spec.js | 23
-rw-r--r-- spec/frontend/static_site_editor/components/publish_toolbar_spec.js | 4
-rw-r--r-- spec/frontend/static_site_editor/components/saved_changes_message_spec.js | 28
-rw-r--r-- spec/frontend/static_site_editor/components/static_site_editor_spec.js | 79
-rw-r--r-- spec/frontend/static_site_editor/components/submit_changes_error_spec.js | 48
-rw-r--r-- spec/frontend/static_site_editor/mock_data.js | 4
-rw-r--r-- spec/frontend/static_site_editor/store/actions_spec.js | 19
-rw-r--r-- spec/frontend/static_site_editor/store/mutations_spec.js | 29
-rw-r--r-- spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap | 287
-rw-r--r-- spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap | 8
-rw-r--r-- spec/frontend/vue_shared/components/awards_list_spec.js | 213
-rw-r--r-- spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap | 4
-rw-r--r-- spec/frontend/vue_shared/components/user_popover/user_popover_spec.js | 73
-rw-r--r-- spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb | 60
-rw-r--r-- spec/graphql/types/metrics/dashboard_type_spec.rb | 11
-rw-r--r-- spec/graphql/types/metrics/dashboards/annotation_type_spec.rb | 17
-rw-r--r-- spec/initializers/lograge_spec.rb | 4
-rw-r--r-- spec/javascripts/filtered_search/filtered_search_manager_spec.js | 102
-rw-r--r-- spec/javascripts/monitoring/components/dashboard_resize_spec.js | 61
-rw-r--r-- spec/javascripts/monitoring/fixture_data.js | 1
-rw-r--r-- spec/javascripts/monitoring/store_utils.js | 1
-rw-r--r-- spec/javascripts/pipelines/graph/graph_component_spec.js | 274
-rw-r--r-- spec/javascripts/pipelines/graph/job_name_component_spec.js | 27
-rw-r--r-- spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js | 43
-rw-r--r-- spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js | 3
-rw-r--r-- spec/javascripts/pipelines/graph/stage_column_component_spec.js | 122
-rw-r--r-- spec/javascripts/sidebar/sidebar_assignees_spec.js | 64
-rw-r--r-- spec/lib/api/entities/project_import_failed_relation_spec.rb | 23
-rw-r--r-- spec/lib/api/entities/project_import_status_spec.rb | 49
-rw-r--r-- spec/lib/api/entities/user_spec.rb | 26
-rw-r--r-- spec/lib/api/validations/validators/limit_spec.rb | 25
-rw-r--r-- spec/lib/banzai/pipeline_spec.rb | 64
-rw-r--r-- spec/lib/csv_builder_spec.rb | 109
-rw-r--r-- spec/lib/gitlab/application_context_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/jwt_spec.rb | 124
-rw-r--r-- spec/lib/gitlab/ci/status/bridge/factory_spec.rb | 72
-rw-r--r-- spec/lib/gitlab/current_settings_spec.rb | 27
-rw-r--r-- spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb | 66
-rw-r--r-- spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb | 90
-rw-r--r-- spec/lib/gitlab/data_builder/pipeline_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/migration_helpers_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/diff/formatters/text_formatter_spec.rb | 3
-rw-r--r-- spec/lib/gitlab/diff/highlight_cache_spec.rb | 52
-rw-r--r-- spec/lib/gitlab/diff/position_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/elasticsearch/logs/lines_spec.rb (renamed from spec/lib/gitlab/elasticsearch/logs_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/elasticsearch/logs/pods_spec.rb | 35
-rw-r--r-- spec/lib/gitlab/file_hook_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/gitaly_client_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb (renamed from spec/lib/gitlab/import_export/group/tree_restorer_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/import_export/project/import_task_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/project/tree_restorer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r-- spec/lib/gitlab/instrumentation_helper_spec.rb | 10
-rw-r--r-- spec/lib/gitlab/json_spec.rb | 91
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/base_command_spec.rb | 52
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/init_command_spec.rb | 52
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/install_command_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/project_template_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/prometheus/adapter_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 18
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb | 13
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb | 13
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware_spec.rb | 47
-rw-r--r-- spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/utils_spec.rb | 18
-rw-r--r-- spec/lib/marginalia_spec.rb | 30
-rw-r--r-- spec/mailers/emails/issues_spec.rb | 49
-rw-r--r-- spec/migrations/cleanup_empty_commit_user_mentions_spec.rb | 2
-rw-r--r-- spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb | 2
-rw-r--r-- spec/models/ci/bridge_spec.rb | 2
-rw-r--r-- spec/models/ci/build_spec.rb | 108
-rw-r--r-- spec/models/ci/job_artifact_spec.rb | 21
-rw-r--r-- spec/models/ci/processable_spec.rb | 78
-rw-r--r-- spec/models/ci/runner_spec.rb | 30
-rw-r--r-- spec/models/clusters/applications/fluentd_spec.rb | 50
-rw-r--r-- spec/models/clusters/applications/ingress_spec.rb | 6
-rw-r--r-- spec/models/clusters/cluster_spec.rb | 3
-rw-r--r-- spec/models/concerns/issuable_spec.rb | 34
-rw-r--r-- spec/models/cycle_analytics/group_level_spec.rb | 2
-rw-r--r-- spec/models/diff_note_position_spec.rb | 7
-rw-r--r-- spec/models/import_failure_spec.rb | 23
-rw-r--r-- spec/models/jira_import_state_spec.rb | 20
-rw-r--r-- spec/models/merge_request_diff_spec.rb | 39
-rw-r--r-- spec/models/merge_request_spec.rb | 66
-rw-r--r-- spec/models/metrics/dashboard/annotation_spec.rb | 26
-rw-r--r-- spec/models/project_feature_spec.rb | 4
-rw-r--r-- spec/models/project_import_state_spec.rb | 21
-rw-r--r-- spec/models/project_services/prometheus_service_spec.rb | 44
-rw-r--r-- spec/models/project_spec.rb | 40
-rw-r--r-- spec/models/resource_milestone_event_spec.rb | 26
-rw-r--r-- spec/models/terraform/state_spec.rb | 25
-rw-r--r-- spec/models/user_spec.rb | 44
-rw-r--r-- spec/models/user_type_enums_spec.rb | 13
-rw-r--r-- spec/policies/global_policy_spec.rb | 33
-rw-r--r-- spec/requests/api/deploy_tokens_spec.rb | 7
-rw-r--r-- spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb | 109
-rw-r--r-- spec/requests/api/graphql/mutations/jira_import/start_spec.rb | 12
-rw-r--r-- spec/requests/api/graphql/project/merge_request_spec.rb | 11
-rw-r--r-- spec/requests/api/markdown_spec.rb | 2
-rw-r--r-- spec/requests/api/merge_requests_spec.rb | 24
-rw-r--r-- spec/requests/api/project_statistics_spec.rb | 8
-rw-r--r-- spec/requests/api/projects_spec.rb | 4
-rw-r--r-- spec/requests/api/terraform/state_spec.rb | 238
-rw-r--r-- spec/routing/openid_connect_spec.rb | 5
-rw-r--r-- spec/routing/project_routing_spec.rb | 7
-rw-r--r-- spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb | 39
-rw-r--r-- spec/rubocop/cop/static_translation_definition_spec.rb | 109
-rw-r--r-- spec/serializers/analytics_summary_serializer_spec.rb | 5
-rw-r--r-- spec/serializers/discussion_entity_spec.rb | 10
-rw-r--r-- spec/serializers/merge_request_basic_entity_spec.rb | 17
-rw-r--r-- spec/serializers/merge_request_poll_cached_widget_entity_spec.rb | 6
-rw-r--r-- spec/serializers/merge_request_poll_widget_entity_spec.rb | 4
-rw-r--r-- spec/serializers/merge_request_serializer_spec.rb | 16
-rw-r--r-- spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb | 22
-rw-r--r-- spec/services/auto_merge_service_spec.rb | 69
-rw-r--r-- spec/services/ci/create_cross_project_pipeline_service_spec.rb | 40
-rw-r--r-- spec/services/ci/update_runner_service_spec.rb | 13
-rw-r--r-- spec/services/emails/destroy_service_spec.rb | 5
-rw-r--r-- spec/services/git/process_ref_changes_service_spec.rb | 43
-rw-r--r-- spec/services/issues/export_csv_service_spec.rb | 170
-rw-r--r-- spec/services/jira_import/start_import_service_spec.rb | 35
-rw-r--r-- spec/services/merge_requests/merge_orchestration_service_spec.rb | 116
-rw-r--r-- spec/services/merge_requests/pushed_branches_service_spec.rb | 42
-rw-r--r-- spec/services/merge_requests/update_service_spec.rb | 14
-rw-r--r-- spec/services/metrics/dashboard/transient_embed_service_spec.rb | 50
-rw-r--r-- spec/services/personal_access_tokens/create_service_spec.rb | 24
-rw-r--r-- spec/services/pod_logs/base_service_spec.rb | 27
-rw-r--r-- spec/services/pod_logs/elasticsearch_service_spec.rb | 63
-rw-r--r-- spec/services/pod_logs/kubernetes_service_spec.rb | 32
-rw-r--r-- spec/services/quick_actions/interpret_service_spec.rb | 29
-rw-r--r-- spec/services/resources/create_access_token_service_spec.rb | 163
-rw-r--r-- spec/services/snippets/create_service_spec.rb | 37
-rw-r--r-- spec/services/terraform/remote_state_handler_spec.rb | 143
-rw-r--r-- spec/services/users/build_service_spec.rb | 20
-rw-r--r-- spec/services/x509_certificate_revoke_service_spec.rb | 2
-rw-r--r-- spec/spec_helper.rb | 17
-rw-r--r-- spec/support/helpers/api_helpers.rb | 11
-rw-r--r-- spec/support/helpers/migrations_helpers.rb | 3
-rw-r--r-- spec/support/import_export/configuration_helper.rb | 4
-rw-r--r-- spec/support/matchers/exclude_matcher.rb | 3
-rw-r--r-- spec/support/shared_examples/controllers/deploy_token_shared_examples.rb | 14
-rw-r--r-- spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb | 1
-rw-r--r-- spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb | 19
-rw-r--r-- spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb | 8
-rw-r--r-- spec/support/sidekiq_middleware.rb | 16
-rw-r--r-- spec/uploaders/records_uploads_spec.rb | 6
-rw-r--r-- spec/uploaders/terraform/state_uploader_spec.rb | 6
-rw-r--r-- spec/views/shared/projects/_project.html.haml_spec.rb | 2
-rw-r--r-- spec/workers/concerns/cronjob_queue_spec.rb | 22
-rw-r--r-- spec/workers/create_commit_signature_worker_spec.rb | 59
-rw-r--r-- spec/workers/expire_pipeline_cache_worker_spec.rb | 8
-rw-r--r-- spec/workers/export_csv_worker_spec.rb | 34
-rw-r--r-- spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb | 18
-rw-r--r-- spec/workers/post_receive_spec.rb | 3
244 files changed, 11340 insertions, 2546 deletions
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 8e48ecddd0f..817223bd91a 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -49,11 +49,12 @@ describe Admin::IntegrationsController do
end
context 'invalid params' do
- let(:url) { 'https://jira.localhost' }
+ let(:url) { 'invalid' }
- it 'updates the integration' do
- expect(response).to have_gitlab_http_status(:found)
- expect(integration.reload.url).to eq(url)
+ it 'does not update the integration' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(integration.reload.url).not_to eq(url)
end
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 7582006df36..803fcf90135 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -72,6 +72,30 @@ describe Admin::RunnersController do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ describe 'Cost factors values' do
+ context 'when it is Gitlab.com' do
+ before do
+ expect(Gitlab).to receive(:com?).at_least(:once) { true }
+ end
+
+ it 'renders cost factors fields' do
+ get :show, params: { id: runner.id }
+
+ expect(response.body).to match /Private projects Minutes cost factor/
+ expect(response.body).to match /Public projects Minutes cost factor/
+ end
+ end
+
+ context 'when it is not Gitlab.com' do
+ it 'does not show cost factor fields' do
+ get :show, params: { id: runner.id }
+
+ expect(response.body).not_to match /Private projects Minutes cost factor/
+ expect(response.body).not_to match /Public projects Minutes cost factor/
+ end
+ end
+ end
end
describe '#update' do
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index a4ce510b413..387fc0407b6 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -340,6 +340,17 @@ describe Admin::UsersController do
end
end
+ describe "DELETE #remove_email" do
+ it 'deletes the email' do
+ email = create(:email, user: user)
+
+ delete :remove_email, params: { id: user.username, email_id: email.id }
+
+ expect(user.reload.emails).not_to include(email)
+ expect(flash[:notice]).to eq('Successfully removed email.')
+ end
+ end
+
describe "POST impersonate" do
context "when the user is blocked" do
before do
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index 6df1ad8a383..76cd74de183 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -100,11 +100,12 @@ describe Groups::Settings::IntegrationsController do
end
context 'invalid params' do
- let(:url) { 'https://jira.localhost' }
+ let(:url) { 'invalid' }
it 'does not update the integration' do
- expect(response).to have_gitlab_http_status(:found)
- expect(integration.reload.url).to eq(url)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(integration.reload.url).not_to eq(url)
end
end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 9526e14a748..862a4bd3559 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1427,6 +1427,45 @@ describe Projects::IssuesController do
end
end
+ describe 'POST export_csv' do
+ let(:viewer) { user }
+ let(:issue) { create(:issue, project: project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ def request_csv
+ post :export_csv, params: { namespace_id: project.namespace.to_param, project_id: project.to_param }
+ end
+
+ context 'when logged in' do
+ before do
+ sign_in(viewer)
+ end
+
+ it 'allows CSV export' do
+ expect(ExportCsvWorker).to receive(:perform_async).with(viewer.id, project.id, anything)
+
+ request_csv
+
+ expect(response).to redirect_to(project_issues_path(project))
+ expect(response.flash[:notice]).to match(/\AYour CSV export has started/i)
+ end
+ end
+
+ context 'when not logged in' do
+ let(:project) { create(:project_empty_repo, :public) }
+
+ it 'redirects to the sign in page' do
+ request_csv
+
+ expect(ExportCsvWorker).not_to receive(:perform_async)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
describe 'GET #discussions' do
let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 5104c83283d..aaeaf53d100 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1245,7 +1245,7 @@ describe Projects::MergeRequestsController do
end
it 'renders MergeRequest as JSON' do
- expect(json_response.keys).to include('id', 'iid')
+ expect(json_response.keys).to include('id', 'iid', 'title', 'has_ci', 'merge_status', 'can_be_merged', 'current_user')
end
end
@@ -1279,7 +1279,7 @@ describe Projects::MergeRequestsController do
it 'renders MergeRequest as JSON' do
subject
- expect(json_response.keys).to include('id', 'iid')
+ expect(json_response.keys).to include('id', 'iid', 'title', 'has_ci', 'merge_status', 'can_be_merged', 'current_user')
end
end
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index e565c757f95..59455d90c25 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -95,7 +95,7 @@ describe Repositories::GitHttpController do
allow(controller).to receive(:access_check).and_return(nil)
end
- after do
+ def send_request
post :git_upload_pack, params: params
end
@@ -106,16 +106,46 @@ describe Repositories::GitHttpController do
it 'does not update project statistics' do
expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+
+ send_request
end
end
if expected
- it 'updates project statistics' do
- expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
+ context 'when project_statistics_sync feature flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_sync: false)
+ end
+
+ it 'updates project statistics async' do
+ expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
+
+ send_request
+ end
+ end
+
+ it 'updates project statistics sync' do
+ expect { send_request }.to change {
+ Projects::DailyStatisticsFinder.new(project).total_fetch_count
+ }.from(0).to(1)
end
else
+ context 'when project_statistics_sync feature flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_sync: false)
+ end
+
+ it 'does not update project statistics' do
+ expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+
+ send_request
+ end
+ end
+
it 'does not update project statistics' do
- expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+ expect { send_request }.not_to change {
+ Projects::DailyStatisticsFinder.new(project).total_fetch_count
+ }.from(0)
end
end
end
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index bacf163896c..4c1d5f07a42 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -7,7 +7,7 @@ FactoryBot.define do
stage_idx { 0 }
ref { 'master' }
tag { false }
- created_at { 'Di 29. Okt 09:50:00 CET 2013' }
+ created_at { '2013-10-29 09:50:00 CET' }
status { :created }
scheduling_type { 'stage' }
@@ -39,5 +39,19 @@ FactoryBot.define do
)
end
end
+
+ trait :started do
+ started_at { '2013-10-29 09:51:28 CET' }
+ end
+
+ trait :finished do
+ started
+ finished_at { '2013-10-29 09:53:28 CET' }
+ end
+
+ trait :failed do
+ finished
+ status { 'failed' }
+ end
end
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index a259c5142fc..82383cfa2b0 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -13,7 +13,7 @@ FactoryBot.define do
end
trait :remote_store do
- file_store { JobArtifactUploader::Store::REMOTE }
+ file_store { JobArtifactUploader::Store::REMOTE}
end
after :build do |artifact|
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index 0a4f0fba9ab..728c83e01b4 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -139,5 +139,14 @@ FactoryBot.define do
cluster factory: %i(cluster provided_by_gcp)
end
end
+
+ factory :clusters_applications_fluentd, class: 'Clusters::Applications::Fluentd' do
+ host { 'example.com' }
+ cluster factory: %i(cluster with_installed_helm provided_by_gcp)
+
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+ end
+ end
end
end
diff --git a/spec/factories/diff_position.rb b/spec/factories/diff_position.rb
index a43c5afdff4..685272acf5c 100644
--- a/spec/factories/diff_position.rb
+++ b/spec/factories/diff_position.rb
@@ -34,10 +34,20 @@ FactoryBot.define do
position_type { 'text' }
old_line { 10 }
new_line { 10 }
+ line_range { nil }
trait :added do
old_line { nil }
end
+
+ trait :multi_line do
+ line_range do
+ {
+ start_line_code: Gitlab::Git.diff_line_code(file, 10, 10),
+ end_line_code: Gitlab::Git.diff_line_code(file, 12, 13)
+ }
+ end
+ end
end
factory :image_diff_position do
diff --git a/spec/factories/import_failures.rb b/spec/factories/import_failures.rb
new file mode 100644
index 00000000000..376b2ff39e2
--- /dev/null
+++ b/spec/factories/import_failures.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+
+FactoryBot.define do
+ factory :import_failure do
+ association :project, factory: :project
+
+ created_at { Time.parse('2020-01-01T00:00:00Z') }
+ exception_class { 'RuntimeError' }
+ exception_message { 'Something went wrong' }
+ source { 'method_call' }
+ correlation_id_value { SecureRandom.uuid }
+
+ trait :hard_failure do
+ retry_count { 0 }
+ end
+
+ trait :soft_failure do
+ retry_count { 1 }
+ end
+ end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 2b468ef92e1..64321c9f319 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -37,6 +37,8 @@ FactoryBot.define do
group_runners_enabled { nil }
import_status { nil }
import_jid { nil }
+ import_correlation_id { nil }
+ import_last_error { nil }
forward_deployment_enabled { nil }
end
@@ -78,6 +80,8 @@ FactoryBot.define do
import_state = project.import_state || project.build_import_state
import_state.status = evaluator.import_status
import_state.jid = evaluator.import_jid
+ import_state.correlation_id_value = evaluator.import_correlation_id
+ import_state.last_error = evaluator.import_last_error
import_state.save
end
end
diff --git a/spec/factories/terraform/state.rb b/spec/factories/terraform/state.rb
index 4b83128ff6e..74950ccf93e 100644
--- a/spec/factories/terraform/state.rb
+++ b/spec/factories/terraform/state.rb
@@ -4,8 +4,10 @@ FactoryBot.define do
factory :terraform_state, class: 'Terraform::State' do
project { create(:project) }
+ sequence(:name) { |n| "state-#{n}" }
+
trait :with_file do
- file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate') }
+ file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate', 'application/json') }
end
end
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 0ce567e11fe..f274503f0e7 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -27,6 +27,10 @@ FactoryBot.define do
user_type { :alert_bot }
end
+ trait :project_bot do
+ user_type { :project_bot }
+ end
+
trait :external do
external { true }
end
@@ -83,12 +87,17 @@ FactoryBot.define do
transient do
developer_projects { [] }
+ maintainer_projects { [] }
end
after(:create) do |user, evaluator|
evaluator.developer_projects.each do |project|
project.add_developer(user)
end
+
+ evaluator.maintainer_projects.each do |project|
+ project.add_maintainer(user)
+ end
end
factory :omniauth_user do
diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb
index e33c9d7e64c..0ca61e6c193 100644
--- a/spec/features/admin/admin_mode/workers_spec.rb
+++ b/spec/features/admin/admin_mode/workers_spec.rb
@@ -8,8 +8,6 @@ describe 'Admin mode for workers', :do_not_mock_admin_mode, :request_store, :cle
let(:user_to_delete) { create(:user) }
before do
- add_sidekiq_middleware
-
sign_in(user)
end
@@ -60,12 +58,6 @@ describe 'Admin mode for workers', :do_not_mock_admin_mode, :request_store, :cle
end
end
- def add_sidekiq_middleware
- Sidekiq::Testing.server_middleware do |chain|
- chain.add Gitlab::SidekiqMiddleware::AdminMode::Server
- end
- end
-
def execute_jobs_signed_out(user)
gitlab_sign_out
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 8eb15bb6bf5..1a3da8cb373 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -348,12 +348,19 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
it 'loads usage ping payload on click', :js do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- expect(page).to have_button 'Preview payload'
+ page.within('#js-usage-settings') do
+ expected_payload_content = /(?=.*"uuid")(?=.*"hostname")/m
- find('.js-usage-ping-payload-trigger').click
+ expect(page).not_to have_content expected_payload_content
- expect(page).to have_selector '.js-usage-ping-payload'
- expect(page).to have_button 'Hide payload'
+ click_button('Preview payload')
+
+ wait_for_requests
+
+ expect(page).to have_selector '.js-usage-ping-payload'
+ expect(page).to have_button 'Hide payload'
+ expect(page).to have_content expected_payload_content
+ end
end
end
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 4a20d1b7d60..50d9cb1c833 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -30,6 +30,7 @@ describe 'Value Stream Analytics', :js do
expect(new_issues_counter).to have_content('-')
expect(commits_counter).to have_content('-')
expect(deploys_counter).to have_content('-')
+ expect(deployment_frequency_counter).to have_content('-')
end
it 'shows active stage with empty message' do
@@ -53,6 +54,7 @@ describe 'Value Stream Analytics', :js do
expect(new_issues_counter).to have_content('1')
expect(commits_counter).to have_content('2')
expect(deploys_counter).to have_content('1')
+ expect(deployment_frequency_counter).to have_content('0')
end
it 'shows data on each stage', :sidekiq_might_not_need_inline do
@@ -134,7 +136,15 @@ describe 'Value Stream Analytics', :js do
end
def deploys_counter
- find(:xpath, "//p[contains(text(),'Deploy')]/preceding-sibling::h3")
+ find(:xpath, "//p[contains(text(),'Deploy')]/preceding-sibling::h3", match: :first)
+ end
+
+ def deployment_frequency_counter_selector
+ "//p[contains(text(),'Deployment Frequency')]/preceding-sibling::h3"
+ end
+
+ def deployment_frequency_counter
+ find(:xpath, deployment_frequency_counter_selector)
end
def expect_issue_to_be_present
diff --git a/spec/features/issues/csv_spec.rb b/spec/features/issues/csv_spec.rb
new file mode 100644
index 00000000000..193c83d2a40
--- /dev/null
+++ b/spec/features/issues/csv_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Issues csv' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:milestone) { create(:milestone, title: 'v1.0', project: project) }
+ let(:idea_label) { create(:label, project: project, title: 'Idea') }
+ let(:feature_label) { create(:label, project: project, title: 'Feature', priority: 10) }
+ let!(:issue) { create(:issue, project: project, author: user) }
+
+ before do
+ sign_in(user)
+ end
+
+ def request_csv(params = {})
+ visit project_issues_path(project, params)
+ page.within('.nav-controls') do
+ click_on 'Export as CSV'
+ end
+ click_on 'Export issues'
+ end
+
+ def attachment
+ ActionMailer::Base.deliveries.last.attachments.first
+ end
+
+ def csv
+ CSV.parse(attachment.decode_body, headers: true)
+ end
+
+ it 'triggers an email export' do
+ expect(ExportCsvWorker).to receive(:perform_async).with(user.id, project.id, hash_including("project_id" => project.id))
+
+ request_csv
+ end
+
+ it "doesn't send request params to ExportCsvWorker" do
+ expect(ExportCsvWorker).to receive(:perform_async).with(anything, anything, hash_excluding("controller" => anything, "action" => anything))
+
+ request_csv
+ end
+
+ it 'displays flash message' do
+ request_csv
+
+ expect(page).to have_content 'CSV export has started'
+ expect(page).to have_content "emailed to #{user.notification_email}"
+ end
+
+ it 'includes a csv attachment', :sidekiq_might_not_need_inline do
+ request_csv
+
+ expect(attachment.content_type).to include('text/csv')
+ end
+
+ it 'ignores pagination', :sidekiq_might_not_need_inline do
+ create_list(:issue, 30, project: project, author: user)
+
+ request_csv
+
+ expect(csv.count).to eq 31
+ end
+
+ it 'uses filters from issue index', :sidekiq_might_not_need_inline do
+ request_csv(state: :closed)
+
+ expect(csv.count).to eq 0
+ end
+
+ it 'ignores sorting from issue index', :sidekiq_might_not_need_inline do
+ issue2 = create(:labeled_issue, project: project, author: user, labels: [feature_label])
+
+ request_csv(sort: :label_priority)
+
+ expected = [issue.iid.to_s, issue2.iid.to_s]
+ expect(csv.map { |row| row['Issue ID'] }).to eq expected
+ end
+
+ it 'uses array filters, such as label_name', :sidekiq_might_not_need_inline do
+ issue.update!(labels: [idea_label])
+
+ request_csv("label_name[]" => 'Bug')
+
+ expect(csv.count).to eq 0
+ end
+
+ it 'avoids excessive database calls' do
+ control_count = ActiveRecord::QueryRecorder.new { request_csv }.count
+ create_list(:labeled_issue,
+ 10,
+ project: project,
+ assignees: [user],
+ author: user,
+ milestone: milestone,
+ labels: [feature_label, idea_label])
+ expect { request_csv }.not_to exceed_query_limit(control_count + 5)
+ end
+end
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
new file mode 100644
index 00000000000..93ef0801791
--- /dev/null
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Merge request > User resolves Work in Progress', :js do
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { project.creator }
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project,
+ author: user,
+ title: 'WIP: Bug NS-04',
+ merge_params: { force_remove_source_branch: '1' })
+ end
+ let(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch,
+ head_pipeline_of: merge_request)
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when there is active pipeline for merge request' do
+ before do
+ create(:ci_build, pipeline: pipeline)
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'retains merge request data after clicking Resolve WIP status' do
+ expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}")
+ expect(page).to have_content "This is a Work in Progress"
+
+ click_button('Resolve WIP status')
+
+ wait_for_requests
+
+ # If we don't disable the wait here, the test will wait until the
+ # merge request widget refreshes, which masks missing elements
+ # that should already be present.
+ expect(page.find('.ci-widget-content', wait: 0)).to have_content("Pipeline ##{pipeline.id}")
+ expect(page).not_to have_content('This is a Work in Progress')
+ end
+ end
+end
diff --git a/spec/features/projects/environments_pod_logs_spec.rb b/spec/features/projects/environments_pod_logs_spec.rb
index 2b2327940a5..a51f121bf59 100644
--- a/spec/features/projects/environments_pod_logs_spec.rb
+++ b/spec/features/projects/environments_pod_logs_spec.rb
@@ -57,7 +57,9 @@ describe 'Environment > Pod Logs', :js do
expect(item.text).to eq(pod_names[i])
end
end
- expect(page).to have_content("Dec 13 14:04:22.123Z | kube-pod | Log 1 Dec 13 14:04:23.123Z | kube-pod | Log 2 Dec 13 14:04:24.123Z | kube-pod | Log 3")
+ expect(page).to have_content("kube-pod | Log 1")
+ expect(page).to have_content("kube-pod | Log 2")
+ expect(page).to have_content("kube-pod | Log 3")
end
end
end
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index b55a42e07a9..d883a1fc39c 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
shared_examples_for 'snippet editor' do
before do
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(monaco_snippets: flag)
end
diff --git a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb
index bad3fde8a4a..cf501e55e23 100644
--- a/spec/features/projects/snippets/user_updates_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb
@@ -11,6 +11,7 @@ describe 'Projects > Snippets > User updates a snippet', :js do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(version_snippets: version_snippet_enabled)
project.add_maintainer(user)
diff --git a/spec/features/snippets/spam_snippets_spec.rb b/spec/features/snippets/spam_snippets_spec.rb
index e9534dedcd3..69e3f190725 100644
--- a/spec/features/snippets/spam_snippets_spec.rb
+++ b/spec/features/snippets/spam_snippets_spec.rb
@@ -10,6 +10,7 @@ shared_examples_for 'snippet editor' do
before do
stub_feature_flags(allow_possible_spam: false)
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(monaco_snippets: flag)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index 93da976dee0..5d3a84dd7bc 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
shared_examples_for 'snippet editor' do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(monaco_snippets: flag)
sign_in(user)
visit new_snippet_path
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 0bbb92b1f3f..b4f8fbfa47e 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -14,6 +14,7 @@ describe 'User edits snippet', :js do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(version_snippets: version_snippet_enabled)
sign_in(user)
diff --git a/spec/features/static_site_editor_spec.rb b/spec/features/static_site_editor_spec.rb
new file mode 100644
index 00000000000..c457002f888
--- /dev/null
+++ b/spec/features/static_site_editor_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Static Site Editor' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit project_show_sse_path(project, 'master/README.md')
+ end
+
+ it 'renders Static Site Editor page' do
+ expect(page).to have_selector('#static-site-editor')
+ end
+end
diff --git a/spec/finders/autocomplete/move_to_project_finder_spec.rb b/spec/finders/autocomplete/move_to_project_finder_spec.rb
index 9129a3b65be..f2da82bb9be 100644
--- a/spec/finders/autocomplete/move_to_project_finder_spec.rb
+++ b/spec/finders/autocomplete/move_to_project_finder_spec.rb
@@ -62,19 +62,20 @@ describe Autocomplete::MoveToProjectFinder do
expect(finder.execute.to_a).to eq([other_reporter_project])
end
- it 'returns a page of projects ordered by name' do
+ it 'returns a page of projects ordered by star count' do
stub_const('Autocomplete::MoveToProjectFinder::LIMIT', 2)
- projects = create_list(:project, 3) do |project|
- project.add_developer(user)
- end
+ projects = [
+ create(:project, namespace: user.namespace, star_count: 1),
+ create(:project, namespace: user.namespace, star_count: 5),
+ create(:project, namespace: user.namespace)
+ ]
finder = described_class.new(user, project_id: project.id)
page = finder.execute.to_a
- expected_projects = projects.sort_by(&:name).first(2)
expect(page.length).to eq(2)
- expect(page).to eq(expected_projects)
+ expect(page).to eq([projects[1], projects[0]])
end
end
diff --git a/spec/finders/metrics/dashboards/annotations_finder_spec.rb b/spec/finders/metrics/dashboards/annotations_finder_spec.rb
new file mode 100644
index 00000000000..222875ba2e2
--- /dev/null
+++ b/spec/finders/metrics/dashboards/annotations_finder_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboards::AnnotationsFinder do
+ describe '#execute' do
+ subject(:annotations) { described_class.new(dashboard: dashboard, params: params).execute }
+
+ let_it_be(:current_user) { create(:user) }
+ let(:path) { 'config/prometheus/common_metrics.yml' }
+ let(:params) { {} }
+ let(:environment) { create(:environment) }
+ let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: environment) }
+
+ context 'there are no annotations records' do
+ it 'returns empty array' do
+ expect(annotations).to be_empty
+ end
+ end
+
+ context 'with annotation records' do
+ let!(:nine_minutes_old_annotation) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 9.minutes.ago, dashboard_path: path) }
+ let!(:fifteen_minutes_old_annotation) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 15.minutes.ago, dashboard_path: path) }
+ let!(:just_created_annotation) { create(:metrics_dashboard_annotation, environment: environment, dashboard_path: path) }
+ let!(:annotation_for_different_env) { create(:metrics_dashboard_annotation, dashboard_path: path) }
+ let!(:annotation_for_different_dashboard) { create(:metrics_dashboard_annotation, dashboard_path: '.gitlab/dashboards/test.yml') }
+
+ it 'loads annotations' do
+ expect(annotations).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation, just_created_annotation]
+ end
+
+ context 'when the from filter is present' do
+ let(:params) do
+ {
+ from: 14.minutes.ago
+ }
+ end
+
+ it 'loads only younger annotations' do
+ expect(annotations).to match_array [nine_minutes_old_annotation, just_created_annotation]
+ end
+ end
+
+ context 'when the to filter is present' do
+ let(:params) do
+ {
+ to: 5.minutes.ago
+ }
+ end
+
+ it 'loads only older annotations' do
+ expect(annotations).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation]
+ end
+ end
+
+ context 'when from and to filters are present' do
+ context 'and to is bigger than from' do
+ let(:params) do
+ {
+ from: 14.minutes.ago,
+ to: 5.minutes.ago
+ }
+ end
+
+ it 'loads only annotations assigned to this interval' do
+ expect(annotations).to match_array [nine_minutes_old_annotation]
+ end
+ end
+
+ context 'and from is bigger than to' do
+ let(:params) do
+ {
+ to: 14.minutes.ago,
+ from: 5.minutes.ago
+ }
+ end
+
+ it 'ignores to parameter and returns annotations starting at from filter' do
+ expect(annotations).to match_array [just_created_annotation]
+ end
+ end
+
+ context 'when from or to filters are empty strings' do
+ let(:params) do
+ {
+ from: '',
+ to: ''
+ }
+ end
+
+ it 'ignores this parameters' do
+ expect(annotations).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation, just_created_annotation]
+ end
+ end
+ end
+
+ context 'dashboard environment is missing' do
+ let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: nil) }
+
+ it 'returns empty relation', :aggregate_failures do
+ expect(annotations).to be_kind_of ::ActiveRecord::Relation
+ expect(annotations).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/cluster_status.json b/spec/fixtures/api/schemas/cluster_status.json
index ba97b7c82cb..ce62655648b 100644
--- a/spec/fixtures/api/schemas/cluster_status.json
+++ b/spec/fixtures/api/schemas/cluster_status.json
@@ -39,6 +39,9 @@
"stack": { "type": ["string", "null"] },
"modsecurity_enabled": { "type": ["boolean", "null"] },
"modsecurity_mode": {"type": ["integer", "0"]},
+ "host": {"type": ["string", "null"]},
+ "port": {"type": ["integer", "514"]},
+ "protocol": {"type": ["integer", "0"]},
"update_available": { "type": ["boolean", "null"] },
"can_uninstall": { "type": "boolean" },
"available_domains": {
diff --git a/spec/fixtures/lib/elasticsearch/pods_query.json b/spec/fixtures/lib/elasticsearch/pods_query.json
new file mode 100644
index 00000000000..90d162b871a
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/pods_query.json
@@ -0,0 +1,28 @@
+{
+ "aggs": {
+ "pods": {
+ "aggs": {
+ "containers": {
+ "terms": {
+ "field": "kubernetes.container.name",
+ "size": 500
+ }
+ }
+ },
+ "terms": {
+ "field": "kubernetes.pod.name",
+ "size": 500
+ }
+ }
+ },
+ "query": {
+ "bool": {
+ "must": {
+ "match_phrase": {
+ "kubernetes.namespace": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ },
+ "size": 0
+}
diff --git a/spec/fixtures/lib/elasticsearch/pods_response.json b/spec/fixtures/lib/elasticsearch/pods_response.json
new file mode 100644
index 00000000000..d923f914d7c
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/pods_response.json
@@ -0,0 +1,75 @@
+{
+ "took": 8540,
+ "timed_out": false,
+ "_shards": {
+ "total": 153,
+ "successful": 153,
+ "skipped": 0,
+ "failed": 0
+ },
+ "hits": {
+ "total": 62143,
+ "max_score": 0.0,
+ "hits": [
+
+ ]
+ },
+ "aggregations": {
+ "pods": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "runner-gitlab-runner-7bbfb5dcb5-p6smb",
+ "doc_count": 19795,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "runner-gitlab-runner",
+ "doc_count": 19795
+ }
+ ]
+ }
+ },
+ {
+ "key": "elastic-stack-elasticsearch-master-1",
+ "doc_count": 13185,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "elasticsearch",
+ "doc_count": 13158
+ },
+ {
+ "key": "chown",
+ "doc_count": 24
+ },
+ {
+ "key": "sysctl",
+ "doc_count": 3
+ }
+ ]
+ }
+ },
+ {
+ "key": "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
+ "doc_count": 3437,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "nginx-ingress-controller",
+ "doc_count": 3437
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+}
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
index 189d2629efa..971ef72521d 100644
--- a/spec/frontend/blob/components/blob_edit_content_spec.js
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -80,7 +80,7 @@ describe('Blob Header Editing', () => {
getValue: jest.fn().mockReturnValue(value),
};
- editorEl.trigger('focusout');
+ editorEl.trigger('keyup');
return nextTick().then(() => {
expect(wrapper.emitted().input[0]).toEqual([value]);
diff --git a/spec/frontend/ci_variable_list/components/ci_key_field_spec.js b/spec/frontend/ci_variable_list/components/ci_key_field_spec.js
new file mode 100644
index 00000000000..bcc29f22dd1
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_key_field_spec.js
@@ -0,0 +1,244 @@
+import { mount } from '@vue/test-utils';
+import { GlButton, GlFormInput } from '@gitlab/ui';
+import { AWS_ACCESS_KEY_ID, AWS_DEFAULT_REGION } from '~/ci_variable_list/constants';
+import CiKeyField from '~/ci_variable_list/components/ci_key_field.vue';
+
+import {
+ awsTokens,
+ awsTokenList,
+} from '~/ci_variable_list/components/ci_variable_autocomplete_tokens';
+
+const doTimes = (num, fn) => {
+ for (let i = 0; i < num; i += 1) {
+ fn();
+ }
+};
+
+describe('Ci Key field', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mount({
+ data() {
+ return {
+ inputVal: '',
+ tokens: awsTokenList,
+ };
+ },
+ components: { CiKeyField },
+ template: `
+ <div>
+ <ci-key-field
+ v-model="inputVal"
+ :token-list="tokens"
+ />
+ </div>
+ `,
+ });
+ };
+
+ const findDropdown = () => wrapper.find('#ci-variable-dropdown');
+ const findDropdownOptions = () => wrapper.findAll(GlButton).wrappers.map(item => item.text());
+ const findInput = () => wrapper.find(GlFormInput);
+ const findInputValue = () => findInput().element.value;
+ const setInput = val => findInput().setValue(val);
+ const clickDown = () => findInput().trigger('keydown.down');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('match and filter functionality', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('is closed when the input is empty', () => {
+ expect(findInput().isVisible()).toBe(true);
+ expect(findInputValue()).toBe('');
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+
+ it('is open when the input text matches a token', () => {
+ setInput('AWS');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(true);
+ });
+ });
+
+ it('shows partial matches at string start', () => {
+ setInput('AWS');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(true);
+ expect(findDropdownOptions()).toEqual(awsTokenList);
+ });
+ });
+
+ it('shows partial matches mid-string', () => {
+ setInput('D');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(true);
+ expect(findDropdownOptions()).toEqual([
+ awsTokens[AWS_ACCESS_KEY_ID].name,
+ awsTokens[AWS_DEFAULT_REGION].name,
+ ]);
+ });
+ });
+
+ it('is closed when the text does not match', () => {
+ setInput('elephant');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+ });
+
+ describe('keyboard navigation in dropdown', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('on down arrow + enter', () => {
+ it('selects the next item in the list and closes the dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.down');
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[0]);
+ });
+ });
+
+ it('loops to the top when it reaches the bottom', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ doTimes(findDropdownOptions().length + 1, clickDown);
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[0]);
+ });
+ });
+ });
+
+ describe('on up arrow + enter', () => {
+ it('selects the previous item in the list and closes the dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ doTimes(3, clickDown);
+ findInput().trigger('keydown.up');
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[1]);
+ });
+ });
+
+ it('loops to the bottom when it reaches the top', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.down');
+ findInput().trigger('keydown.up');
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[awsTokenList.length - 1]);
+ });
+ });
+ });
+
+ describe('on enter with no item highlighted', () => {
+ it('does not select any item and closes the dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('AWS');
+ });
+ });
+ });
+
+ describe('on click', () => {
+ it('selects the clicked item regardless of arrow highlight', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.find(GlButton).trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[0]);
+ });
+ });
+ });
+
+ describe('on tab', () => {
+ it('selects entered text, closes dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.tab');
+ doTimes(2, clickDown);
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('AWS');
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+ });
+
+ describe('on esc', () => {
+ describe('when dropdown is open', () => {
+ it('closes dropdown and does not select anything', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.esc');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('AWS');
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+ });
+
+ describe('when dropdown is closed', () => {
+ it('clears the input field', () => {
+ setInput('elephant');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(findDropdown().isVisible()).toBe(false);
+ findInput().trigger('keydown.esc');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('');
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index 70edd36669b..7b8d69df35e 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,7 +1,10 @@
import Vuex from 'vuex';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import { GlDeprecatedButton } from '@gitlab/ui';
+import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import CiKeyField from '~/ci_variable_list/components/ci_key_field.vue';
+import { awsTokens } from '~/ci_variable_list/components/ci_variable_autocomplete_tokens';
import createStore from '~/ci_variable_list/store';
import mockData from '../services/mock_data';
import ModalStub from '../stubs';
@@ -13,14 +16,17 @@ describe('Ci variable modal', () => {
let wrapper;
let store;
- const createComponent = () => {
+ const createComponent = (method, options = {}) => {
store = createStore();
- wrapper = shallowMount(CiVariableModal, {
+ wrapper = method(CiVariableModal, {
+ attachToDocument: true,
+ provide: { glFeatures: { ciKeyAutocomplete: true } },
stubs: {
GlModal: ModalStub,
},
localVue,
store,
+ ...options,
});
};
@@ -34,22 +40,46 @@ describe('Ci variable modal', () => {
.findAll(GlDeprecatedButton)
.at(1);
- beforeEach(() => {
- createComponent();
- jest.spyOn(store, 'dispatch').mockImplementation();
- });
-
afterEach(() => {
wrapper.destroy();
});
- it('button is disabled when no key/value pair are present', () => {
- expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ describe('Feature flag', () => {
+ describe('when off', () => {
+ beforeEach(() => {
+ createComponent(shallowMount, { provide: { glFeatures: { ciKeyAutocomplete: false } } });
+ });
+
+ it('does not render the autocomplete dropdown', () => {
+ expect(wrapper.contains(CiKeyField)).toBe(false);
+ });
+ });
+
+ describe('when on', () => {
+ beforeEach(() => {
+ createComponent(shallowMount);
+ });
+ it('renders the autocomplete dropdown', () => {
+ expect(wrapper.find(CiKeyField).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('Basic interactions', () => {
+ beforeEach(() => {
+ createComponent(shallowMount);
+ });
+
+ it('button is disabled when no key/value pair are present', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
});
describe('Adding a new variable', () => {
beforeEach(() => {
const [variable] = mockData.mockVariables;
+ createComponent(shallowMount);
+ jest.spyOn(store, 'dispatch').mockImplementation();
store.state.variable = variable;
});
@@ -71,6 +101,8 @@ describe('Ci variable modal', () => {
describe('Editing a variable', () => {
beforeEach(() => {
const [variable] = mockData.mockVariables;
+ createComponent(shallowMount);
+ jest.spyOn(store, 'dispatch').mockImplementation();
store.state.variableBeingEdited = variable;
});
@@ -96,4 +128,105 @@ describe('Ci variable modal', () => {
expect(store.dispatch).toHaveBeenCalledWith('deleteVariable', mockData.mockVariables[0]);
});
});
+
+ describe('Validations', () => {
+ const maskError = 'This variable can not be masked.';
+
+ describe('when the key state is invalid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLEjdhy',
+ secret_value: 'AKIAIOSFODNN7EXAMPLEjdhy',
+ };
+ createComponent(mount);
+ store.state.variable = invalidKeyVariable;
+ });
+
+ it('disables the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ it('shows the correct error text', () => {
+ const errorText = awsTokens[AWS_ACCESS_KEY_ID].invalidMessage;
+ expect(findModal().text()).toContain(errorText);
+ });
+ });
+
+ describe('when the mask state is invalid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidMaskVariable = {
+ ...variable,
+ key: 'qs',
+ value: 'd:;',
+ secret_value: 'd:;',
+ masked: true,
+ };
+ createComponent(mount);
+ store.state.variable = invalidMaskVariable;
+ });
+
+ it('disables the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ it('shows the correct error text', () => {
+ expect(findModal().text()).toContain(maskError);
+ });
+ });
+
+ describe('when the mask and key states are invalid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidMaskandKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLEjdhyd:;',
+ secret_value: 'AKIAIOSFODNN7EXAMPLEjdhyd:;',
+ masked: true,
+ };
+ createComponent(mount);
+ store.state.variable = invalidMaskandKeyVariable;
+ });
+
+ it('disables the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ it('shows the correct error text', () => {
+ const errorText = awsTokens[AWS_ACCESS_KEY_ID].invalidMessage;
+ expect(findModal().text()).toContain(maskError);
+ expect(findModal().text()).toContain(errorText);
+ });
+ });
+
+ describe('when both states are valid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const validMaskandKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLE',
+ secret_value: 'AKIAIOSFODNN7EXAMPLE',
+ masked: true,
+ };
+ createComponent(mount);
+ store.state.variable = validMaskandKeyVariable;
+ store.state.maskableRegex = /^[a-zA-Z0-9_+=/@:-]{8,}$/;
+ });
+
+ it('does not disable the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeFalsy();
+ });
+
+ it('shows no error text', () => {
+ const errorText = awsTokens[AWS_ACCESS_KEY_ID].invalidMessage;
+ expect(findModal().text()).not.toContain(maskError);
+ expect(findModal().text()).not.toContain(errorText);
+ });
+ });
+ });
});
diff --git a/spec/frontend/clusters/services/application_state_machine_spec.js b/spec/frontend/clusters/services/application_state_machine_spec.js
index 8632c5c4e26..b27cd2c80fd 100644
--- a/spec/frontend/clusters/services/application_state_machine_spec.js
+++ b/spec/frontend/clusters/services/application_state_machine_spec.js
@@ -161,4 +161,20 @@ describe('applicationStateMachine', () => {
});
});
});
+
+ describe('current state is undefined', () => {
+    it('returns the current state without any side effects', () => {
+ const currentAppState = {};
+ expect(transitionApplicationState(currentAppState, INSTALLABLE)).toEqual(currentAppState);
+ });
+ });
+
+  describe('when event is undefined', () => {
+    it('returns the current state without any side effects', () => {
+ const currentAppState = {
+ status: NO_STATUS,
+ };
+ expect(transitionApplicationState(currentAppState, undefined)).toEqual(currentAppState);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index 517d050eb54..6bb3a0dcf21 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -59,9 +59,7 @@ describe('diffs/components/commit_item', () => {
expect(titleElement.text()).toBe(commit.title_html);
});
- // https://gitlab.com/gitlab-org/gitlab/-/issues/209776
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('renders commit description', () => {
+ it('renders commit description', () => {
const descElement = getDescElement();
const descExpandElement = getDescExpandElement();
diff --git a/spec/frontend/diffs/components/diff_table_cell_spec.js b/spec/frontend/diffs/components/diff_table_cell_spec.js
index 1af0746f3bd..e871d86d901 100644
--- a/spec/frontend/diffs/components/diff_table_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_table_cell_spec.js
@@ -85,15 +85,18 @@ describe('DiffTableCell', () => {
describe('comment button', () => {
it.each`
- showCommentButton | userData | query | expectation
- ${true} | ${TEST_USER} | ${'diff_head=false'} | ${true}
- ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false}
- ${false} | ${TEST_USER} | ${'bogus'} | ${false}
- ${true} | ${null} | ${''} | ${false}
+ showCommentButton | userData | query | mergeRefHeadComments | expectation
+ ${true} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${true}
+ ${true} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${true}
+ ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false} | ${false}
+ ${false} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${false}
+ ${false} | ${TEST_USER} | ${'bogus'} | ${true} | ${false}
+ ${true} | ${null} | ${''} | ${true} | ${false}
`(
'exists is $expectation - with showCommentButton ($showCommentButton) userData ($userData) query ($query)',
- ({ showCommentButton, userData, query, expectation }) => {
+ ({ showCommentButton, userData, query, mergeRefHeadComments, expectation }) => {
store.state.notes.userData = userData;
+ gon.features = { mergeRefHeadComments };
setWindowLocation({ href: `${TEST_HOST}?${query}` });
createComponent({ showCommentButton });
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 8a1c3e56e5a..ceccce6312f 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -466,6 +466,7 @@ describe('DiffsStoreActions', () => {
old_path: 'file2',
line_code: 'ABC_1_1',
position_type: 'text',
+ line_range: null,
},
},
hash: 'ABC_123',
diff --git a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
index 3e5ba66d5e4..0343ef75732 100644
--- a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
+++ b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
@@ -1,6 +1,9 @@
import * as getters from '~/diffs/store/getters';
import state from '~/diffs/store/modules/diff_state';
-import { DIFF_COMPARE_BASE_VERSION_INDEX } from '~/diffs/constants';
+import {
+ DIFF_COMPARE_BASE_VERSION_INDEX,
+ DIFF_COMPARE_HEAD_VERSION_INDEX,
+} from '~/diffs/constants';
import diffsMockData from '../mock_data/merge_request_diffs';
describe('Compare diff version dropdowns', () => {
@@ -37,47 +40,93 @@ describe('Compare diff version dropdowns', () => {
describe('diffCompareDropdownTargetVersions', () => {
// diffCompareDropdownTargetVersions slices the array at the first position
- // and appends a "base" version which is why we use diffsMockData[1] below
- // This is to display "base" at the end of the target dropdown
- const expectedFirstVersion = {
- ...diffsMockData[1],
- href: expect.any(String),
- versionName: expect.any(String),
+ // and appends a "base" and "head" version at the end of the list so that
+ // "base" and "head" appear at the bottom of the dropdown
+ // this is also why we use diffsMockData[1] for the "first" version
+
+ let expectedFirstVersion;
+ let expectedBaseVersion;
+ let expectedHeadVersion;
+ const originalLocation = window.location;
+
+ const setupTest = includeDiffHeadParam => {
+ const diffHeadParam = includeDiffHeadParam ? '?diff_head=true' : '';
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: `https://example.gitlab.com${diffHeadParam}` },
+ });
+
+ expectedFirstVersion = {
+ ...diffsMockData[1],
+ href: expect.any(String),
+ versionName: expect.any(String),
+ selected: false,
+ };
+
+ expectedBaseVersion = {
+ versionName: 'baseVersion',
+ version_index: DIFF_COMPARE_BASE_VERSION_INDEX,
+ href: 'basePath',
+ isBase: true,
+ selected: false,
+ };
+
+ expectedHeadVersion = {
+ versionName: 'baseVersion',
+ version_index: DIFF_COMPARE_HEAD_VERSION_INDEX,
+ href: 'headPath',
+ isHead: true,
+ selected: false,
+ };
};
- const expectedBaseVersion = {
- versionName: 'baseVersion',
- version_index: DIFF_COMPARE_BASE_VERSION_INDEX,
- href: 'basePath',
- isBase: true,
+ const assertVersions = targetVersions => {
+ // base and head should be the last two versions in that order
+ const targetBaseVersion = targetVersions[targetVersions.length - 2];
+ const targetHeadVersion = targetVersions[targetVersions.length - 1];
+ expect(targetVersions[0]).toEqual(expectedFirstVersion);
+ expect(targetBaseVersion).toEqual(expectedBaseVersion);
+ expect(targetHeadVersion).toEqual(expectedHeadVersion);
};
+ afterEach(() => {
+ window.location = originalLocation;
+ });
+
it('base version selected', () => {
- expectedFirstVersion.selected = false;
+ setupTest();
expectedBaseVersion.selected = true;
- const targetVersions = getters.diffCompareDropdownTargetVersions(localState, {
- selectedTargetIndex: DIFF_COMPARE_BASE_VERSION_INDEX,
- });
+ const targetVersions = getters.diffCompareDropdownTargetVersions(localState, getters);
+ assertVersions(targetVersions);
+ });
- const lastVersion = targetVersions[targetVersions.length - 1];
- expect(targetVersions[0]).toEqual(expectedFirstVersion);
- expect(lastVersion).toEqual(expectedBaseVersion);
+ it('head version selected', () => {
+ setupTest(true);
+
+ expectedHeadVersion.selected = true;
+
+ const targetVersions = getters.diffCompareDropdownTargetVersions(localState, getters);
+ assertVersions(targetVersions);
});
it('first version selected', () => {
- expectedFirstVersion.selected = true;
- expectedBaseVersion.selected = false;
+    // NOTE: It should not normally be possible to have "diff_head=true" in the
+    // url while anything other than the head version is selected, but the user
+    // could manually add "?diff_head=true" to the url. In that case we still
+    // want the actually selected version to display as "selected".
+    // Passing "true" here asserts that the first version is still selected
+    // even if "diff_head" is present in the url
+ setupTest(true);
+ expectedFirstVersion.selected = true;
localState.startVersion = expectedFirstVersion;
const targetVersions = getters.diffCompareDropdownTargetVersions(localState, {
selectedTargetIndex: expectedFirstVersion.version_index,
});
-
- const lastVersion = targetVersions[targetVersions.length - 1];
- expect(targetVersions[0]).toEqual(expectedFirstVersion);
- expect(lastVersion).toEqual(expectedBaseVersion);
+ assertVersions(targetVersions);
});
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index c44feaf4b63..858ab5be167 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -615,6 +615,73 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
});
+
+ it('should add discussions by line_codes and positions attributes', () => {
+ const diffPosition = {
+ base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ head_sha: 'b921914f9a834ac47e6fd9420f78db0f83559130',
+ new_line: null,
+ new_path: '500-lines-4.txt',
+ old_line: 5,
+ old_path: '500-lines-4.txt',
+ start_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ };
+
+ const state = {
+ latestDiff: true,
+ diffFiles: [
+ {
+ file_hash: 'ABC',
+ parallel_diff_lines: [
+ {
+ left: {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ right: {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ },
+ ],
+ highlighted_diff_lines: [
+ {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ ],
+ },
+ ],
+ };
+ const discussion = {
+ id: 1,
+ line_code: 'ABC_2',
+ line_codes: ['ABC_1'],
+ diff_discussion: true,
+ resolvable: true,
+ original_position: {},
+ position: {},
+ positions: [diffPosition],
+ diff_file: {
+ file_hash: state.diffFiles[0].file_hash,
+ },
+ };
+
+ const diffPositionByLineCode = {
+ ABC_1: diffPosition,
+ };
+
+ mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
+ discussion,
+ diffPositionByLineCode,
+ });
+
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions).toHaveLength(1);
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toBe(1);
+
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions).toHaveLength(1);
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toBe(1);
+ });
});
describe('REMOVE_LINE_DISCUSSIONS', () => {
diff --git a/spec/frontend/fixtures/merge_requests_diffs.rb b/spec/frontend/fixtures/merge_requests_diffs.rb
index 7997ee79a01..76bb8567a64 100644
--- a/spec/frontend/fixtures/merge_requests_diffs.rb
+++ b/spec/frontend/fixtures/merge_requests_diffs.rb
@@ -10,7 +10,6 @@ describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)', type
let(:project) { create(:project, :repository, namespace: namespace, path: 'merge-requests-project') }
let(:merge_request) { create(:merge_request, :with_diffs, source_project: project, target_project: project, description: '- [ ] Task List Item') }
let(:path) { "files/ruby/popen.rb" }
- let(:selected_commit) { merge_request.all_commits[0] }
let(:position) do
build(:text_diff_position, :added,
file: path,
@@ -34,11 +33,11 @@ describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)', type
end
it 'merge_request_diffs/with_commit.json' do
- # Create a user that matches the selected commit author
+ # Create a user that matches the project.commit author
# This is so that the "author" information will be populated
- create(:user, email: selected_commit.author_email, name: selected_commit.author_name)
+ create(:user, email: project.commit.author_email, name: project.commit.author_name)
- render_merge_request(merge_request, commit_id: selected_commit.sha)
+ render_merge_request(merge_request, commit_id: project.commit.sha)
end
it 'merge_request_diffs/inline_changes_tab_with_comments.json' do
diff --git a/spec/frontend/helpers/dom_events_helper.js b/spec/frontend/helpers/dom_events_helper.js
new file mode 100644
index 00000000000..b66c12daf4f
--- /dev/null
+++ b/spec/frontend/helpers/dom_events_helper.js
@@ -0,0 +1,10 @@
+export const triggerDOMEvent = type => {
+ window.document.dispatchEvent(
+ new Event(type, {
+ bubbles: true,
+ cancelable: true,
+ }),
+ );
+};
+
+export default () => {};
diff --git a/spec/frontend/jira_import/components/jira_import_app_spec.js b/spec/frontend/jira_import/components/jira_import_app_spec.js
index fb3ffe1ede3..ce32559d5c9 100644
--- a/spec/frontend/jira_import/components/jira_import_app_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_app_spec.js
@@ -1,38 +1,213 @@
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import JiraImportApp from '~/jira_import/components/jira_import_app.vue';
+import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
+import JiraImportProgress from '~/jira_import/components/jira_import_progress.vue';
import JiraImportSetup from '~/jira_import/components/jira_import_setup.vue';
+import initiateJiraImportMutation from '~/jira_import/queries/initiate_jira_import.mutation.graphql';
+import { IMPORT_STATE } from '~/jira_import/utils';
+
+const mountComponent = ({
+ isJiraConfigured = true,
+ errorMessage = '',
+ showAlert = true,
+ status = IMPORT_STATE.NONE,
+ loading = false,
+ mutate = jest.fn(() => Promise.resolve()),
+} = {}) =>
+ shallowMount(JiraImportApp, {
+ propsData: {
+ isJiraConfigured,
+ inProgressIllustration: 'in-progress-illustration.svg',
+ issuesPath: 'gitlab-org/gitlab-test/-/issues',
+ jiraProjects: [
+ ['My Jira Project', 'MJP'],
+ ['My Second Jira Project', 'MSJP'],
+ ['Migrate to GitLab', 'MTG'],
+ ],
+ projectPath: 'gitlab-org/gitlab-test',
+ setupIllustration: 'setup-illustration.svg',
+ },
+ data() {
+ return {
+ errorMessage,
+ showAlert,
+ jiraImportDetails: {
+ status,
+ import: {
+ jiraProjectKey: 'MTG',
+ scheduledAt: '2020-04-08T12:17:25+00:00',
+ scheduledBy: {
+ name: 'Jane Doe',
+ },
+ },
+ },
+ };
+ },
+ mocks: {
+ $apollo: {
+ loading,
+ mutate,
+ },
+ },
+ });
describe('JiraImportApp', () => {
let wrapper;
+ const getFormComponent = () => wrapper.find(JiraImportForm);
+
+ const getProgressComponent = () => wrapper.find(JiraImportProgress);
+
+ const getSetupComponent = () => wrapper.find(JiraImportSetup);
+
+ const getAlert = () => wrapper.find(GlAlert);
+
+ const getLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- describe('set up Jira integration page', () => {
+ describe('when Jira integration is not configured', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ isJiraConfigured: false });
+ });
+
+ it('shows the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(true);
+ });
+
+ it('does not show loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ it('does not show the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(false);
+ });
+
+ it('does not show the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(false);
+ });
+ });
+
+ describe('when Jira integration is configured but data is being fetched', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ loading: true });
+ });
+
+ it('does not show the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(false);
+ });
+
+ it('shows loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not show the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(false);
+ });
+
+ it('does not show the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(false);
+ });
+ });
+
+ describe('when Jira integration is configured but import is in progress', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ status: IMPORT_STATE.SCHEDULED });
+ });
+
+ it('does not show the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(false);
+ });
+
+ it('does not show loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ it('shows the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(true);
+ });
+
+ it('does not show the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(false);
+ });
+ });
+
+ describe('when Jira integration is configured and there is no import in progress', () => {
beforeEach(() => {
- wrapper = shallowMount(JiraImportApp, {
- propsData: {
- isJiraConfigured: true,
- projectPath: 'gitlab-org/gitlab-test',
- setupIllustration: 'illustration.svg',
+ wrapper = mountComponent();
+ });
+
+ it('does not show the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(false);
+ });
+
+ it('does not show loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+    it('does not show the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(false);
+ });
+
+ it('shows the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(true);
+ });
+ });
+
+ describe('initiating a Jira import', () => {
+ it('calls the mutation with the expected arguments', () => {
+ const mutate = jest.fn(() => Promise.resolve());
+
+ wrapper = mountComponent({ mutate });
+
+ const mutationArguments = {
+ mutation: initiateJiraImportMutation,
+ variables: {
+ input: {
+ jiraProjectKey: 'MTG',
+ projectPath: 'gitlab-org/gitlab-test',
+ },
},
- });
+ };
+
+ getFormComponent().vm.$emit('initiateJiraImport', 'MTG');
+
+ expect(mutate).toHaveBeenCalledWith(expect.objectContaining(mutationArguments));
});
- it('is shown when Jira integration is not configured', () => {
- wrapper.setProps({
- isJiraConfigured: false,
- });
+    it('shows an alert with an error message when the mutation fails', () => {
+ const mutate = jest.fn(() => Promise.reject());
+
+ wrapper = mountComponent({ mutate });
+
+ getFormComponent().vm.$emit('initiateJiraImport', 'MTG');
+
+      // One tick doesn't update the DOM to the desired state, so we wait for two ticks here
+ return Vue.nextTick()
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(getAlert().text()).toBe('There was an error importing the Jira project.');
+ });
+ });
+ });
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.find(JiraImportSetup).exists()).toBe(true);
- });
+ it('can dismiss alert message', () => {
+ wrapper = mountComponent({
+ errorMessage: 'There was an error importing the Jira project.',
+ showAlert: true,
});
- it('is not shown when Jira integration is configured', () => {
- expect(wrapper.find(JiraImportSetup).exists()).toBe(false);
+ expect(getAlert().exists()).toBe(true);
+
+ getAlert().vm.$emit('dismiss');
+
+ return Vue.nextTick().then(() => {
+ expect(getAlert().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index 315ccccd991..0987eb11693 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -1,62 +1,126 @@
-import { GlAvatar, GlNewButton, GlFormSelect, GlLabel } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlAvatar, GlButton, GlFormSelect, GlLabel } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
+const mountComponent = ({ mountType } = {}) => {
+ const mountFunction = mountType === 'mount' ? mount : shallowMount;
+
+ return mountFunction(JiraImportForm, {
+ propsData: {
+ issuesPath: 'gitlab-org/gitlab-test/-/issues',
+ jiraProjects: [
+ {
+ text: 'My Jira Project',
+ value: 'MJP',
+ },
+ {
+ text: 'My Second Jira Project',
+ value: 'MSJP',
+ },
+ {
+ text: 'Migrate to GitLab',
+ value: 'MTG',
+ },
+ ],
+ },
+ });
+};
+
describe('JiraImportForm', () => {
let wrapper;
- beforeEach(() => {
- wrapper = shallowMount(JiraImportForm);
- });
+ const getCancelButton = () => wrapper.findAll(GlButton).at(1);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- it('shows a dropdown to choose the Jira project to import from', () => {
- expect(wrapper.find(GlFormSelect).exists()).toBe(true);
- });
+ describe('select dropdown', () => {
+ it('is shown', () => {
+ wrapper = mountComponent();
- it('shows a label which will be applied to imported Jira projects', () => {
- expect(wrapper.find(GlLabel).attributes('title')).toBe('jira-import::KEY-1');
- });
+ expect(wrapper.find(GlFormSelect).exists()).toBe(true);
+ });
- it('shows information to the user', () => {
- expect(wrapper.find('p').text()).toBe(
- "For each Jira issue successfully imported, we'll create a new GitLab issue with the following data:",
- );
- });
+ it('contains a list of Jira projects to select from', () => {
+ wrapper = mountComponent({ mountType: 'mount' });
- it('shows jira.issue.summary for the Title', () => {
- expect(wrapper.find('[id="jira-project-title"]').text()).toBe('jira.issue.summary');
+ const optionItems = ['My Jira Project', 'My Second Jira Project', 'Migrate to GitLab'];
+
+ wrapper
+ .find(GlFormSelect)
+ .findAll('option')
+ .wrappers.forEach((optionEl, index) => {
+ expect(optionEl.text()).toBe(optionItems[index]);
+ });
+ });
});
- it('shows an avatar for the Reporter', () => {
- expect(wrapper.find(GlAvatar).exists()).toBe(true);
+ describe('form information', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('shows a label which will be applied to imported Jira projects', () => {
+ expect(wrapper.find(GlLabel).attributes('title')).toBe('jira-import::KEY-1');
+ });
+
+ it('shows information to the user', () => {
+ expect(wrapper.find('p').text()).toBe(
+ "For each Jira issue successfully imported, we'll create a new GitLab issue with the following data:",
+ );
+ });
+
+ it('shows jira.issue.summary for the Title', () => {
+ expect(wrapper.find('[id="jira-project-title"]').text()).toBe('jira.issue.summary');
+ });
+
+ it('shows an avatar for the Reporter', () => {
+ expect(wrapper.find(GlAvatar).exists()).toBe(true);
+ });
+
+ it('shows jira.issue.description.content for the Description', () => {
+ expect(wrapper.find('[id="jira-project-description"]').text()).toBe(
+ 'jira.issue.description.content',
+ );
+ });
});
- it('shows jira.issue.description.content for the Description', () => {
- expect(wrapper.find('[id="jira-project-description"]').text()).toBe(
- 'jira.issue.description.content',
- );
+ describe('Next button', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('is shown', () => {
+ expect(wrapper.find(GlButton).text()).toBe('Next');
+ });
});
- it('shows a Next button', () => {
- const nextButton = wrapper
- .findAll(GlNewButton)
- .at(0)
- .text();
+ describe('Cancel button', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('is shown', () => {
+ expect(getCancelButton().text()).toBe('Cancel');
+ });
- expect(nextButton).toBe('Next');
+ it('links to the Issues page', () => {
+ expect(getCancelButton().attributes('href')).toBe('gitlab-org/gitlab-test/-/issues');
+ });
});
- it('shows a Cancel button', () => {
- const cancelButton = wrapper
- .findAll(GlNewButton)
- .at(1)
- .text();
+ it('emits an "initiateJiraImport" event with the selected dropdown value when submitted', () => {
+ const selectedOption = 'MTG';
+
+ wrapper = mountComponent();
+ wrapper.setData({
+ selectedOption,
+ });
+
+ wrapper.find('form').trigger('submit');
- expect(cancelButton).toBe('Cancel');
+ expect(wrapper.emitted('initiateJiraImport')[0]).toEqual([selectedOption]);
});
});
diff --git a/spec/frontend/jira_import/components/jira_import_progress_spec.js b/spec/frontend/jira_import/components/jira_import_progress_spec.js
new file mode 100644
index 00000000000..9a6fc3b5925
--- /dev/null
+++ b/spec/frontend/jira_import/components/jira_import_progress_spec.js
@@ -0,0 +1,70 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import JiraImportProgress from '~/jira_import/components/jira_import_progress.vue';
+
+describe('JiraImportProgress', () => {
+ let wrapper;
+
+ const getGlEmptyStateAttribute = attribute => wrapper.find(GlEmptyState).attributes(attribute);
+
+ const getParagraphText = () => wrapper.find('p').text();
+
+ const mountComponent = ({ mountType = 'shallowMount' } = {}) => {
+ const mountFunction = mountType === 'shallowMount' ? shallowMount : mount;
+ return mountFunction(JiraImportProgress, {
+ propsData: {
+ illustration: 'illustration.svg',
+ importInitiator: 'Jane Doe',
+ importProject: 'JIRAPROJECT',
+ importTime: '2020-04-08T12:17:25+00:00',
+ issuesPath: 'gitlab-org/gitlab-test/-/issues',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('empty state', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('contains illustration', () => {
+ expect(getGlEmptyStateAttribute('svgpath')).toBe('illustration.svg');
+ });
+
+ it('contains a title', () => {
+ const title = 'Import in progress';
+ expect(getGlEmptyStateAttribute('title')).toBe(title);
+ });
+
+ it('contains button text', () => {
+ expect(getGlEmptyStateAttribute('primarybuttontext')).toBe('View issues');
+ });
+
+ it('contains button url', () => {
+ expect(getGlEmptyStateAttribute('primarybuttonlink')).toBe('gitlab-org/gitlab-test/-/issues');
+ });
+ });
+
+ describe('description', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ mountType: 'mount' });
+ });
+
+ it('shows who initiated the import', () => {
+ expect(getParagraphText()).toContain('Import started by: Jane Doe');
+ });
+
+ it('shows the time of import', () => {
+ expect(getParagraphText()).toContain('Time of import: Apr 8, 2020 12:17pm GMT+0000');
+ });
+
+ it('shows the project key of the import', () => {
+ expect(getParagraphText()).toContain('Jira project: JIRAPROJECT');
+ });
+ });
+});
diff --git a/spec/frontend/jira_import/components/jira_import_setup_spec.js b/spec/frontend/jira_import/components/jira_import_setup_spec.js
index 27366bd7e8a..834c14b512e 100644
--- a/spec/frontend/jira_import/components/jira_import_setup_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_setup_spec.js
@@ -1,9 +1,12 @@
+import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import JiraImportSetup from '~/jira_import/components/jira_import_setup.vue';
describe('JiraImportSetup', () => {
let wrapper;
+ const getGlEmptyStateAttribute = attribute => wrapper.find(GlEmptyState).attributes(attribute);
+
beforeEach(() => {
wrapper = shallowMount(JiraImportSetup, {
propsData: {
@@ -17,12 +20,16 @@ describe('JiraImportSetup', () => {
wrapper = null;
});
- it('displays a message to the user', () => {
- const message = 'You will first need to set up Jira Integration to use this feature.';
- expect(wrapper.find('p').text()).toBe(message);
+ it('contains illustration', () => {
+ expect(getGlEmptyStateAttribute('svgpath')).toBe('illustration.svg');
+ });
+
+ it('contains a description', () => {
+ const description = 'You will first need to set up Jira Integration to use this feature.';
+ expect(getGlEmptyStateAttribute('description')).toBe(description);
});
- it('contains button to set up Jira integration', () => {
- expect(wrapper.find('a').text()).toBe('Set up Jira Integration');
+ it('contains button text', () => {
+ expect(getGlEmptyStateAttribute('primarybuttontext')).toBe('Set up Jira Integration');
});
});
diff --git a/spec/frontend/jira_import/utils_spec.js b/spec/frontend/jira_import/utils_spec.js
new file mode 100644
index 00000000000..a14db104229
--- /dev/null
+++ b/spec/frontend/jira_import/utils_spec.js
@@ -0,0 +1,27 @@
+import { IMPORT_STATE, isInProgress } from '~/jira_import/utils';
+
+describe('isInProgress', () => {
+ it('returns true when state is IMPORT_STATE.SCHEDULED', () => {
+ expect(isInProgress(IMPORT_STATE.SCHEDULED)).toBe(true);
+ });
+
+ it('returns true when state is IMPORT_STATE.STARTED', () => {
+ expect(isInProgress(IMPORT_STATE.STARTED)).toBe(true);
+ });
+
+ it('returns false when state is IMPORT_STATE.FAILED', () => {
+ expect(isInProgress(IMPORT_STATE.FAILED)).toBe(false);
+ });
+
+ it('returns false when state is IMPORT_STATE.FINISHED', () => {
+ expect(isInProgress(IMPORT_STATE.FINISHED)).toBe(false);
+ });
+
+ it('returns false when state is IMPORT_STATE.NONE', () => {
+ expect(isInProgress(IMPORT_STATE.NONE)).toBe(false);
+ });
+
+ it('returns false when state is undefined', () => {
+ expect(isInProgress()).toBe(false);
+ });
+});
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
index 537582cff5a..14c8f7a2ba2 100644
--- a/spec/frontend/logs/mock_data.js
+++ b/spec/frontend/logs/mock_data.js
@@ -34,91 +34,31 @@ export const mockPods = [
export const mockLogsResult = [
{
timestamp: '2019-12-13T13:43:18.2760123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+ message: 'log line 1',
pod: 'foo',
},
{
timestamp: '2019-12-13T13:43:18.2760123Z',
- message: '- -> /',
+ message: 'log line A',
pod: 'bar',
},
{
timestamp: '2019-12-13T13:43:26.8420123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+ message: 'log line 2',
pod: 'foo',
},
{
timestamp: '2019-12-13T13:43:26.8420123Z',
- message: '- -> /',
- pod: 'bar',
- },
- {
- timestamp: '2019-12-13T13:43:28.3710123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:28.3710123Z',
- message: '- -> /',
- pod: 'bar',
- },
- {
- timestamp: '2019-12-13T13:43:36.8860123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:36.8860123Z',
- message: '- -> /',
- pod: 'bar',
- },
- {
- timestamp: '2019-12-13T13:43:38.4000123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:38.4000123Z',
- message: '- -> /',
- pod: 'bar',
- },
- {
- timestamp: '2019-12-13T13:43:46.8420123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:46.8430123Z',
- message: '- -> /',
- pod: 'bar',
- },
- {
- timestamp: '2019-12-13T13:43:48.3240123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
- pod: 'foo',
- },
- {
- timestamp: '2019-12-13T13:43:48.3250123Z',
- message: '- -> /',
+ message: 'log line B',
pod: 'bar',
},
];
export const mockTrace = [
- 'Dec 13 13:43:18.276Z | foo | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:18.276Z | bar | - -> /',
- 'Dec 13 13:43:26.842Z | foo | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:26.842Z | bar | - -> /',
- 'Dec 13 13:43:28.371Z | foo | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:28.371Z | bar | - -> /',
- 'Dec 13 13:43:36.886Z | foo | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:36.886Z | bar | - -> /',
- 'Dec 13 13:43:38.400Z | foo | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:38.400Z | bar | - -> /',
- 'Dec 13 13:43:46.842Z | foo | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:46.843Z | bar | - -> /',
- 'Dec 13 13:43:48.324Z | foo | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:48.325Z | bar | - -> /',
+ 'Dec 13 13:43:18.276 | foo | log line 1',
+ 'Dec 13 13:43:18.276 | bar | log line A',
+ 'Dec 13 13:43:26.842 | foo | log line 2',
+ 'Dec 13 13:43:26.842 | bar | log line B',
];
export const mockResponse = {
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index d968b042ff1..1906ad7c6ed 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -6,101 +6,106 @@ exports[`Dashboard template matches the default snapshot 1`] = `
data-qa-selector="prometheus_graphs"
>
<div
- class="prometheus-graphs-header gl-p-3 pb-0 border-bottom bg-gray-light"
+ class="prometheus-graphs-header d-sm-flex flex-sm-wrap pt-2 pr-1 pb-0 pl-2 border-bottom bg-gray-light"
>
<div
- class="row"
+ class="mb-2 pr-2 d-flex d-sm-block"
>
- <gl-form-group-stub
- class="col-sm-12 col-md-6 col-lg-2"
- label="Dashboard"
- label-for="monitor-dashboards-dropdown"
- label-size="sm"
- >
- <dashboards-dropdown-stub
- class="mb-0 d-flex"
- data-qa-selector="dashboards_filter_dropdown"
- defaultbranch="master"
- id="monitor-dashboards-dropdown"
- selecteddashboard="[object Object]"
- toggle-class="dropdown-menu-toggle"
- />
- </gl-form-group-stub>
-
- <gl-form-group-stub
- class="col-sm-6 col-md-6 col-lg-2"
- label="Environment"
- label-for="monitor-environments-dropdown"
- label-size="sm"
+ <dashboards-dropdown-stub
+ class="flex-grow-1"
+ data-qa-selector="dashboards_filter_dropdown"
+ defaultbranch="master"
+ id="monitor-dashboards-dropdown"
+ selecteddashboard="[object Object]"
+ toggle-class="dropdown-menu-toggle"
+ />
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <gl-dropdown-stub
+ class="flex-grow-1"
+ data-qa-selector="environments_dropdown"
+ id="monitor-environments-dropdown"
+ menu-class="monitor-environment-dropdown-menu"
+ text="production"
+ toggle-class="dropdown-menu-toggle"
>
- <gl-dropdown-stub
- class="mb-0 d-flex"
- data-qa-selector="environments_dropdown"
- id="monitor-environments-dropdown"
- menu-class="monitor-environment-dropdown-menu"
- text="production"
- toggle-class="dropdown-menu-toggle"
+ <div
+ class="d-flex flex-column overflow-hidden"
>
+ <gl-dropdown-header-stub
+ class="monitor-environment-dropdown-header text-center"
+ >
+
+ Environment
+
+ </gl-dropdown-header-stub>
+
+ <gl-dropdown-divider-stub />
+
+ <gl-search-box-by-type-stub
+ class="m-2"
+ clearbuttontitle="Clear"
+ value=""
+ />
+
+ <div
+ class="flex-fill overflow-auto"
+ />
+
<div
- class="d-flex flex-column overflow-hidden"
+ class="text-secondary no-matches-message"
>
- <gl-dropdown-header-stub
- class="monitor-environment-dropdown-header text-center"
- >
- Environment
- </gl-dropdown-header-stub>
-
- <gl-dropdown-divider-stub />
-
- <gl-search-box-by-type-stub
- class="m-2"
- clearbuttontitle="Clear"
- value=""
- />
-
- <div
- class="flex-fill overflow-auto"
- />
-
- <div
- class="text-secondary no-matches-message"
- >
-
- No matching results
- </div>
+ No matching results
+
</div>
- </gl-dropdown-stub>
- </gl-form-group-stub>
-
- <gl-form-group-stub
- class="col-sm-auto col-md-auto col-lg-auto"
+ </div>
+ </gl-dropdown-stub>
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <date-time-picker-stub
+ class="flex-grow-1 show-last-dropdown"
+ customenabled="true"
data-qa-selector="show_last_dropdown"
- label="Show last"
- label-for="monitor-time-window-dropdown"
- label-size="sm"
+ options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
+ value="[object Object]"
+ />
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <gl-deprecated-button-stub
+ class="flex-grow-1"
+ size="md"
+ title="Refresh dashboard"
+ variant="default"
>
- <date-time-picker-stub
- customenabled="true"
- options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
- value="[object Object]"
+ <icon-stub
+ name="retry"
+ size="16"
/>
- </gl-form-group-stub>
+ </gl-deprecated-button-stub>
+ </div>
+
+ <div
+ class="flex-grow-1"
+ />
+
+ <div
+ class="d-sm-flex"
+ >
+ <!---->
- <gl-form-group-stub
- class="col-sm-2 col-md-2 col-lg-1 refresh-dashboard-button"
- >
- <gl-deprecated-button-stub
- size="md"
- title="Refresh dashboard"
- variant="default"
- >
- <icon-stub
- name="retry"
- size="16"
- />
- </gl-deprecated-button-stub>
- </gl-form-group-stub>
+ <!---->
+
+ <!---->
<!---->
</div>
diff --git a/spec/frontend/monitoring/components/charts/annotations_spec.js b/spec/frontend/monitoring/components/charts/annotations_spec.js
index 69bf1fe4ced..fc90175d307 100644
--- a/spec/frontend/monitoring/components/charts/annotations_spec.js
+++ b/spec/frontend/monitoring/components/charts/annotations_spec.js
@@ -54,6 +54,7 @@ describe('annotations spec', () => {
yAxisIndex: 1,
data: expect.any(Array),
markLine: expect.any(Object),
+ markPoint: expect.any(Object),
}),
);
@@ -61,11 +62,12 @@ describe('annotations spec', () => {
expect(annotation).toEqual(expect.any(Object));
});
- expect(annotations.data).toHaveLength(annotationsData.length);
+ expect(annotations.data).toHaveLength(0);
expect(annotations.markLine.data).toHaveLength(annotationsData.length);
+ expect(annotations.markPoint.data).toHaveLength(annotationsData.length);
});
- it('when deploments and annotations data is passed', () => {
+ it('when deployments and annotations data is passed', () => {
const annotations = generateAnnotationsSeries({
deployments: deploymentData,
annotations: annotationsData,
@@ -77,6 +79,7 @@ describe('annotations spec', () => {
yAxisIndex: 1,
data: expect.any(Array),
markLine: expect.any(Object),
+ markPoint: expect.any(Object),
}),
);
@@ -84,7 +87,9 @@ describe('annotations spec', () => {
expect(annotation).toEqual(expect.any(Object));
});
- expect(annotations.data).toHaveLength(deploymentData.length + annotationsData.length);
+ expect(annotations.data).toHaveLength(deploymentData.length);
+ expect(annotations.markLine.data).toHaveLength(annotationsData.length);
+ expect(annotations.markPoint.data).toHaveLength(annotationsData.length);
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/options_spec.js b/spec/frontend/monitoring/components/charts/options_spec.js
index d219a6627bf..1c8fdc01e3e 100644
--- a/spec/frontend/monitoring/components/charts/options_spec.js
+++ b/spec/frontend/monitoring/components/charts/options_spec.js
@@ -31,7 +31,32 @@ describe('options spec', () => {
});
});
- it('formatter options', () => {
+  it('formatter options default to engineering notation', () => {
+ const options = getYAxisOptions();
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(3002.1)).toBe('3k');
+ });
+
+  it('formatter options allow for precision to be set explicitly', () => {
+ const options = getYAxisOptions({
+ precision: 4,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(5002.1)).toBe('5.0021k');
+ });
+
+  it('formatter options allow for overrides in milliseconds', () => {
+ const options = getYAxisOptions({
+ format: SUPPORTED_FORMATS.milliseconds,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(1.1234)).toBe('1.12ms');
+ });
+
+  it('formatter options allow for overrides in bytes', () => {
const options = getYAxisOptions({
format: SUPPORTED_FORMATS.bytes,
});
@@ -46,7 +71,7 @@ describe('options spec', () => {
const formatter = getTooltipFormatter();
expect(formatter).toEqual(expect.any(Function));
- expect(formatter(1)).toBe('1.000');
+ expect(formatter(0.11111)).toBe('111.1m');
});
it('defined format', () => {
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 870e47edde0..5ac716b0c63 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import { GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
GlAreaChart,
GlLineChart,
@@ -12,23 +13,16 @@ import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { createStore } from '~/monitoring/stores';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
+import { deploymentData, mockProjectDir, annotationsData } from '../../mock_data';
import {
- deploymentData,
- mockedQueryResultFixture,
+ metricsDashboardPayload,
metricsDashboardViewModel,
- mockProjectDir,
- mockHost,
-} from '../../mock_data';
+ metricResultStatus,
+} from '../../fixture_data';
import * as iconUtils from '~/lib/utils/icon_utils';
-import { getJSONFixture } from '../../../helpers/fixtures';
const mockSvgPathContent = 'mockSvgPathContent';
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
-
jest.mock('lodash/throttle', () =>
// this throttle mock executes immediately
jest.fn(func => {
@@ -51,7 +45,7 @@ describe('Time series component', () => {
graphData: { ...graphData, type },
deploymentData: store.state.monitoringDashboard.deploymentData,
annotations: store.state.monitoringDashboard.annotations,
- projectPath: `${mockHost}${mockProjectDir}`,
+ projectPath: `${TEST_HOST}${mockProjectDir}`,
},
store,
stubs: {
@@ -74,7 +68,7 @@ describe('Time series component', () => {
store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultFixture,
+ metricResultStatus,
);
// dashboard is a dynamically generated fixture and stored at environment_metrics_dashboard.json
[mockGraphData] = store.state.monitoringDashboard.dashboard.panelGroups[1].panels;
@@ -284,6 +278,33 @@ describe('Time series component', () => {
});
});
+ describe('formatAnnotationsTooltipText', () => {
+ const annotationsMetadata = {
+ name: 'annotations',
+ xAxis: annotationsData[0].from,
+ yAxis: 0,
+ tooltipData: {
+ title: '2020/02/19 10:01:41',
+ content: annotationsData[0].description,
+ },
+ };
+
+ const mockMarkPoint = {
+ componentType: 'markPoint',
+ name: 'annotations',
+ value: undefined,
+ data: annotationsMetadata,
+ };
+
+ it('formats tooltip title and sets tooltip content', () => {
+ const formattedTooltipData = timeSeriesChart.vm.formatAnnotationsTooltipText(
+ mockMarkPoint,
+ );
+ expect(formattedTooltipData.title).toBe('19 Feb 2020, 10:01AM');
+ expect(formattedTooltipData.content).toBe(annotationsMetadata.tooltipData.content);
+ });
+ });
+
describe('setSvg', () => {
const mockSvgName = 'mockSvgName';
@@ -386,6 +407,8 @@ describe('Time series component', () => {
series: [
{
name: mockSeriesName,
+ type: 'line',
+ data: [],
},
],
},
@@ -448,8 +471,8 @@ describe('Time series component', () => {
deploymentFormatter = getChartOptions().yAxis[1].axisLabel.formatter;
});
- it('formats and rounds to 2 decimal places', () => {
- expect(dataFormatter(0.88888)).toBe('0.89');
+ it('formats by default to precision notation', () => {
+ expect(dataFormatter(0.88888)).toBe('889m');
});
it('deployment formatter is set as is required to display a tooltip', () => {
@@ -606,7 +629,7 @@ describe('Time series component', () => {
store = createStore();
const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
graphData.metrics.forEach(metric =>
- Object.assign(metric, { result: mockedQueryResultFixture.result }),
+ Object.assign(metric, { result: metricResultStatus.result }),
);
timeSeriesChart = makeTimeSeriesChart(graphData, 'area-chart');
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index f0b510a01f4..8b6ee9b3bf6 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,34 +1,23 @@
-import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
-import { GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import Tracking from '~/tracking';
+import { GlModal, GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
import VueDraggable from 'vuedraggable';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import { getJSONFixture } from '../../../../spec/frontend/helpers/fixtures';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
-import { setupComponentStore, propsData } from '../init_utils';
-import {
- metricsDashboardViewModel,
- environmentData,
- dashboardGitResponse,
- mockedQueryResultFixture,
-} from '../mock_data';
-
-const localVue = createLocalVue();
-const expectedPanelCount = 4;
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { setupStoreWithDashboard, setMetricResult, setupStoreWithData } from '../store_utils';
+import { environmentData, dashboardGitResponse, propsData } from '../mock_data';
+import { metricsDashboardViewModel, metricsDashboardPanelCount } from '../fixture_data';
describe('Dashboard', () => {
let store;
@@ -43,7 +32,6 @@ describe('Dashboard', () => {
const createShallowWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(Dashboard, {
- localVue,
propsData: { ...propsData, ...props },
methods: {
fetchData: jest.fn(),
@@ -55,7 +43,6 @@ describe('Dashboard', () => {
const createMountedWrapper = (props = {}, options = {}) => {
wrapper = mount(Dashboard, {
- localVue,
propsData: { ...propsData, ...props },
methods: {
fetchData: jest.fn(),
@@ -144,7 +131,7 @@ describe('Dashboard', () => {
{ stubs: ['graph-group', 'panel-type'] },
);
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.showEmptyState).toEqual(false);
@@ -172,7 +159,7 @@ describe('Dashboard', () => {
beforeEach(() => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -201,14 +188,7 @@ describe('Dashboard', () => {
it('hides the environments dropdown list when there is no environments', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultFixture,
- );
+ setupStoreWithDashboard(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(findAllEnvironmentsDropdownItems()).toHaveLength(0);
@@ -218,7 +198,7 @@ describe('Dashboard', () => {
it('renders the datetimepicker dropdown', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(DateTimePicker).exists()).toBe(true);
@@ -228,7 +208,7 @@ describe('Dashboard', () => {
it('renders the refresh dashboard button', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
const refreshBtn = wrapper.findAll({ ref: 'refreshDashboardBtn' });
@@ -241,7 +221,11 @@ describe('Dashboard', () => {
describe('when one of the metrics is missing', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
- setupComponentStore(wrapper);
+
+ const { $store } = wrapper.vm;
+
+ setupStoreWithDashboard($store);
+ setMetricResult({ $store, result: [], panel: 2 });
return wrapper.vm.$nextTick();
});
@@ -273,7 +257,7 @@ describe('Dashboard', () => {
},
);
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -348,14 +332,14 @@ describe('Dashboard', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
it('wraps vuedraggable', () => {
expect(findDraggablePanels().exists()).toBe(true);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
});
it('is disabled by default', () => {
@@ -411,11 +395,11 @@ describe('Dashboard', () => {
it('shows a remove button, which removes a panel', () => {
expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
findFirstDraggableRemoveButton().trigger('click');
return wrapper.vm.$nextTick(() => {
- expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount - 1);
});
});
@@ -534,7 +518,7 @@ describe('Dashboard', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true, currentDashboard });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -564,4 +548,74 @@ describe('Dashboard', () => {
});
});
});
+
+ describe('add custom metrics', () => {
+ const findAddMetricButton = () => wrapper.vm.$refs.addMetricBtn;
+ describe('when not available', () => {
+ beforeEach(() => {
+ createShallowWrapper({
+ hasMetrics: true,
+ customMetricsPath: '/endpoint',
+ });
+ });
+ it('does not render add button on the dashboard', () => {
+ expect(findAddMetricButton()).toBeUndefined();
+ });
+ });
+
+ describe('when available', () => {
+ let origPage;
+ beforeEach(done => {
+ jest.spyOn(Tracking, 'event').mockReturnValue();
+ createShallowWrapper({
+ hasMetrics: true,
+ customMetricsPath: '/endpoint',
+ customMetricsAvailable: true,
+ });
+ setupStoreWithData(wrapper.vm.$store);
+
+ origPage = document.body.dataset.page;
+ document.body.dataset.page = 'projects:environments:metrics';
+
+ wrapper.vm.$nextTick(done);
+ });
+ afterEach(() => {
+ document.body.dataset.page = origPage;
+ });
+
+ it('renders add button on the dashboard', () => {
+ expect(findAddMetricButton()).toBeDefined();
+ });
+
+ it('uses modal for custom metrics form', () => {
+ expect(wrapper.find(GlModal).exists()).toBe(true);
+ expect(wrapper.find(GlModal).attributes().modalid).toBe('add-metric');
+ });
+      it('adding a new metric is tracked', done => {
+ const submitButton = wrapper.vm.$refs.submitCustomMetricsFormBtn;
+ wrapper.setData({
+ formIsValid: true,
+ });
+ wrapper.vm.$nextTick(() => {
+ submitButton.$el.click();
+ wrapper.vm.$nextTick(() => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ document.body.dataset.page,
+ 'click_button',
+ {
+ label: 'add_new_metric',
+ property: 'modal',
+ value: undefined,
+ },
+ );
+ done();
+ });
+ });
+ });
+
+ it('renders custom metrics form fields', () => {
+ expect(wrapper.find(CustomMetricsFormFields).exists()).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/components/dashboard_template_spec.js b/spec/frontend/monitoring/components/dashboard_template_spec.js
index 38523ab82bc..d1790df4189 100644
--- a/spec/frontend/monitoring/components/dashboard_template_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_template_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
-import { propsData } from '../init_utils';
+import { propsData } from '../mock_data';
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index ebfa09874fa..65e9d036d1a 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -9,12 +9,11 @@ import {
updateHistory,
} from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
-import { mockProjectDir } from '../mock_data';
+import { mockProjectDir, propsData } from '../mock_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
import { defaultTimeRange } from '~/vue_shared/constants';
-import { propsData } from '../init_utils';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/components/panel_type_spec.js b/spec/frontend/monitoring/components/panel_type_spec.js
index 02511ac46ea..819b5235284 100644
--- a/spec/frontend/monitoring/components/panel_type_spec.js
+++ b/spec/frontend/monitoring/components/panel_type_spec.js
@@ -10,17 +10,17 @@ import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
import AnomalyChart from '~/monitoring/components/charts/anomaly.vue';
import {
anomalyMockGraphData,
- graphDataPrometheusQueryRange,
mockLogsHref,
mockLogsPath,
mockNamespace,
mockNamespacedData,
mockTimeRange,
-} from 'jest/monitoring/mock_data';
+} from '../mock_data';
+
+import { graphData, graphDataEmpty } from '../fixture_data';
import { createStore, monitoringDashboard } from '~/monitoring/stores';
import { createStore as createEmbedGroupStore } from '~/monitoring/stores/embed_group';
-global.IS_EE = true;
global.URL.createObjectURL = jest.fn();
const mocks = {
@@ -39,10 +39,13 @@ describe('Panel Type component', () => {
const findCopyLink = () => wrapper.find({ ref: 'copyChartLink' });
const findTimeChart = () => wrapper.find({ ref: 'timeChart' });
+ const findTitle = () => wrapper.find({ ref: 'graphTitle' });
+ const findContextualMenu = () => wrapper.find({ ref: 'contextualMenu' });
const createWrapper = props => {
wrapper = shallowMount(PanelType, {
propsData: {
+ graphData,
...props,
},
store,
@@ -64,14 +67,9 @@ describe('Panel Type component', () => {
});
describe('When no graphData is available', () => {
- let glEmptyChart;
- // Deep clone object before modifying
- const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
- graphDataNoResult.metrics[0].result = [];
-
beforeEach(() => {
createWrapper({
- graphData: graphDataNoResult,
+ graphData: graphDataEmpty,
});
});
@@ -80,12 +78,8 @@ describe('Panel Type component', () => {
});
describe('Empty Chart component', () => {
- beforeEach(() => {
- glEmptyChart = wrapper.find(EmptyChart);
- });
-
it('renders the chart title', () => {
- expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataNoResult.title);
+ expect(findTitle().text()).toBe(graphDataEmpty.title);
});
it('renders the no download csv link', () => {
@@ -93,26 +87,19 @@ describe('Panel Type component', () => {
});
it('does not contain graph widgets', () => {
- expect(wrapper.find('.js-graph-widgets').exists()).toBe(false);
+ expect(findContextualMenu().exists()).toBe(false);
});
it('is a Vue instance', () => {
- expect(glEmptyChart.isVueInstance()).toBe(true);
- });
-
- it('it receives a graph title', () => {
- const props = glEmptyChart.props();
-
- expect(props.graphTitle).toBe(wrapper.vm.graphData.title);
+ expect(wrapper.find(EmptyChart).exists()).toBe(true);
+ expect(wrapper.find(EmptyChart).isVueInstance()).toBe(true);
});
});
});
describe('when graph data is available', () => {
beforeEach(() => {
- createWrapper({
- graphData: graphDataPrometheusQueryRange,
- });
+ createWrapper();
});
afterEach(() => {
@@ -120,11 +107,11 @@ describe('Panel Type component', () => {
});
it('renders the chart title', () => {
- expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataPrometheusQueryRange.title);
+ expect(findTitle().text()).toBe(graphData.title);
});
it('contains graph widgets', () => {
- expect(wrapper.find('.js-graph-widgets').exists()).toBe(true);
+ expect(findContextualMenu().exists()).toBe(true);
expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(true);
});
@@ -177,11 +164,7 @@ describe('Panel Type component', () => {
const findEditCustomMetricLink = () => wrapper.find({ ref: 'editMetricLink' });
beforeEach(() => {
- createWrapper({
- graphData: {
- ...graphDataPrometheusQueryRange,
- },
- });
+ createWrapper();
return wrapper.vm.$nextTick();
});
@@ -193,10 +176,10 @@ describe('Panel Type component', () => {
it('is present when the panel contains an edit_path property', () => {
wrapper.setProps({
graphData: {
- ...graphDataPrometheusQueryRange,
+ ...graphData,
metrics: [
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
],
@@ -205,23 +188,6 @@ describe('Panel Type component', () => {
return wrapper.vm.$nextTick(() => {
expect(findEditCustomMetricLink().exists()).toBe(true);
- });
- });
-
- it('shows an "Edit metric" link for a panel with a single metric', () => {
- wrapper.setProps({
- graphData: {
- ...graphDataPrometheusQueryRange,
- metrics: [
- {
- ...graphDataPrometheusQueryRange.metrics[0],
- edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
- },
- ],
- },
- });
-
- return wrapper.vm.$nextTick(() => {
expect(findEditCustomMetricLink().text()).toBe('Edit metric');
});
});
@@ -229,14 +195,14 @@ describe('Panel Type component', () => {
it('shows an "Edit metrics" link for a panel with multiple metrics', () => {
wrapper.setProps({
graphData: {
- ...graphDataPrometheusQueryRange,
+ ...graphData,
metrics: [
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
],
@@ -253,9 +219,7 @@ describe('Panel Type component', () => {
const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
beforeEach(() => {
- createWrapper({
- graphData: graphDataPrometheusQueryRange,
- });
+ createWrapper();
return wrapper.vm.$nextTick();
});
@@ -327,7 +291,6 @@ describe('Panel Type component', () => {
beforeEach(() => {
createWrapper({
clipboardText,
- graphData: graphDataPrometheusQueryRange,
});
});
@@ -353,11 +316,13 @@ describe('Panel Type component', () => {
describe('when downloading metrics data as CSV', () => {
beforeEach(() => {
- graphDataPrometheusQueryRange.y_label = 'metric';
wrapper = shallowMount(PanelType, {
propsData: {
clipboardText: exampleText,
- graphData: graphDataPrometheusQueryRange,
+ graphData: {
+ y_label: 'metric',
+ ...graphData,
+ },
},
store,
});
@@ -370,12 +335,12 @@ describe('Panel Type component', () => {
describe('csvText', () => {
it('converts metrics data from json to csv', () => {
- const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`;
- const data = graphDataPrometheusQueryRange.metrics[0].result[0].values;
+ const header = `timestamp,${graphData.y_label}`;
+ const data = graphData.metrics[0].result[0].values;
const firstRow = `${data[0][0]},${data[0][1]}`;
const secondRow = `${data[1][0]},${data[1][1]}`;
- expect(wrapper.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ expect(wrapper.vm.csvText).toMatch(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
});
});
@@ -402,7 +367,7 @@ describe('Panel Type component', () => {
wrapper = shallowMount(PanelType, {
propsData: {
- graphData: graphDataPrometheusQueryRange,
+ graphData,
namespace: mockNamespace,
},
store,
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
new file mode 100644
index 00000000000..b7b72a15992
--- /dev/null
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -0,0 +1,49 @@
+import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
+import { metricStates } from '~/monitoring/constants';
+
+import { metricsResult } from './mock_data';
+
+// Use globally available `getJSONFixture` so this file can be imported by both karma and jest specs
+export const metricsDashboardResponse = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+export const metricsDashboardPayload = metricsDashboardResponse.dashboard;
+export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
+
+export const metricsDashboardPanelCount = 22;
+export const metricResultStatus = {
+ // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
+ result: metricsResult,
+};
+export const metricResultPods = {
+ // Second metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
+ result: metricsResult,
+};
+export const metricResultEmpty = {
+ metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
+ result: [],
+};
+
+// Graph data
+
+const firstPanel = metricsDashboardViewModel.panelGroups[0].panels[0];
+
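+// First panel of the dashboard fixture, with successful (OK) metric results attached to each metric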
+export const graphData = {
+ ...firstPanel,
+ metrics: firstPanel.metrics.map(metric => ({
+ ...metric,
+ result: metricsResult,
+ state: metricStates.OK,
+ })),
+};
+
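+// The same panel with empty results, representing the NO_DATA state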
+export const graphDataEmpty = {
+ ...firstPanel,
+ metrics: firstPanel.metrics.map(metric => ({
+ ...metric,
+ result: [],
+ state: metricStates.NO_DATA,
+ })),
+};
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
deleted file mode 100644
index 55b6199fdfc..00000000000
--- a/spec/frontend/monitoring/init_utils.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import * as types from '~/monitoring/stores/mutation_types';
-import {
- metricsDashboardPayload,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
- mockApiEndpoint,
- environmentData,
-} from './mock_data';
-
-export const propsData = {
- hasMetrics: false,
- documentationPath: '/path/to/docs',
- settingsPath: '/path/to/settings',
- clustersPath: '/path/to/clusters',
- tagsPath: '/path/to/tags',
- projectPath: '/path/to/project',
- logsPath: '/path/to/logs',
- defaultBranch: 'master',
- metricsEndpoint: mockApiEndpoint,
- deploymentsEndpoint: null,
- emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
- emptyLoadingSvgPath: '/path/to/loading.svg',
- emptyNoDataSvgPath: '/path/to/no-data.svg',
- emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
- emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- currentEnvironmentName: 'production',
- customMetricsAvailable: false,
- customMetricsPath: '',
- validateQueryPath: '',
-};
-
-export const setupComponentStore = wrapper => {
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
-
- // Load 3 panels to the dashboard, one with an empty result
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedEmptyResult,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayloadCoresTotal,
- );
-
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 84dd0b70e71..56236918c68 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,13 +1,47 @@
-import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
-
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
import { TEST_HOST } from '../helpers/test_constants';
-export const mockHost = 'http://test.host';
export const mockProjectDir = '/frontend-fixtures/environments-project';
export const mockApiEndpoint = `${TEST_HOST}/monitoring/mock`;
+export const propsData = {
+ hasMetrics: false,
+ documentationPath: '/path/to/docs',
+ settingsPath: '/path/to/settings',
+ clustersPath: '/path/to/clusters',
+ tagsPath: '/path/to/tags',
+ projectPath: '/path/to/project',
+ logsPath: '/path/to/logs',
+ defaultBranch: 'master',
+ metricsEndpoint: mockApiEndpoint,
+ deploymentsEndpoint: null,
+ emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
+ emptyLoadingSvgPath: '/path/to/loading.svg',
+ emptyNoDataSvgPath: '/path/to/no-data.svg',
+ emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
+ emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
+ currentEnvironmentName: 'production',
+ customMetricsAvailable: false,
+ customMetricsPath: '',
+ validateQueryPath: '',
+};
+
+const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
+ default: false,
+ display_name: `Custom Dashboard ${idx}`,
+ can_edit: true,
+ system_dashboard: false,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
+ path: `.gitlab/dashboards/dashboard_${idx}.yml`,
+}));
+
+export const mockDashboardsErrorResponse = {
+ all_dashboards: customDashboardsData,
+ message: "Each 'panel_group' must define an array :panels",
+ status: 'error',
+};
+
export const anomalyDeploymentData = [
{
id: 111,
@@ -213,130 +247,27 @@ export const deploymentData = [
export const annotationsData = [
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/1',
- starting_at: '2020-04-01T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-12 12:51:53 UTC',
+ endingAt: null,
panelId: null,
description: 'This is a test annotation',
},
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/2',
description: 'test annotation 2',
- starting_at: '2020-04-02T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-13 12:51:53 UTC',
+ endingAt: null,
panelId: null,
},
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/3',
description: 'test annotation 3',
- starting_at: '2020-04-04T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-16 12:51:53 UTC',
+ endingAt: null,
panelId: null,
},
];
-export const metricsNewGroupsAPIResponse = [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Pod average)',
- type: 'area-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 17,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
- appearance: {
- line: {
- width: 2,
- },
- },
- },
- ],
- },
- ],
- },
-];
-
-const metricsResult = [
- {
- metric: {},
- values: [
- [1563272065.589, '10.396484375'],
- [1563272125.589, '10.333984375'],
- [1563272185.589, '10.333984375'],
- [1563272245.589, '10.333984375'],
- [1563272305.589, '10.333984375'],
- [1563272365.589, '10.333984375'],
- [1563272425.589, '10.38671875'],
- [1563272485.589, '10.333984375'],
- [1563272545.589, '10.333984375'],
- [1563272605.589, '10.333984375'],
- [1563272665.589, '10.333984375'],
- [1563272725.589, '10.333984375'],
- [1563272785.589, '10.396484375'],
- [1563272845.589, '10.333984375'],
- [1563272905.589, '10.333984375'],
- [1563272965.589, '10.3984375'],
- [1563273025.589, '10.337890625'],
- [1563273085.589, '10.34765625'],
- [1563273145.589, '10.337890625'],
- [1563273205.589, '10.337890625'],
- [1563273265.589, '10.337890625'],
- [1563273325.589, '10.337890625'],
- [1563273385.589, '10.337890625'],
- [1563273445.589, '10.337890625'],
- [1563273505.589, '10.337890625'],
- [1563273565.589, '10.337890625'],
- [1563273625.589, '10.337890625'],
- [1563273685.589, '10.337890625'],
- [1563273745.589, '10.337890625'],
- [1563273805.589, '10.337890625'],
- [1563273865.589, '10.390625'],
- [1563273925.589, '10.390625'],
- ],
- },
-];
-
-export const mockedEmptyResult = {
- metricId: '1_response_metrics_nginx_ingress_throughput_status_code',
- result: [],
-};
-
-export const mockedEmptyThroughputResult = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
- result: [],
-};
-
-export const mockedQueryResultPayload = {
- metricId: '12_system_metrics_kubernetes_container_memory_total',
- result: metricsResult,
-};
-
-export const mockedQueryResultPayloadCoresTotal = {
- metricId: '13_system_metrics_kubernetes_container_cores_total',
- result: metricsResult,
-};
-
-export const mockedQueryResultFixture = {
- // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
- metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
- result: metricsResult,
-};
-
-export const mockedQueryResultFixtureStatusCode = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
- result: metricsResult,
-};
-
const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
id: `gid://gitlab/Environments/${150 + idx}`,
name: `no-deployment/noop-branch-${idx}`,
@@ -384,158 +315,6 @@ export const environmentData = [
},
].concat(extraEnvironmentData);
-export const metricsDashboardPayload = {
- dashboard: 'Environment metrics',
- priority: 1,
- panel_groups: [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Memory Used',
- weight: 4,
- y_axis: {
- format: 'megabytes',
- },
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1000/1000',
- label: 'Total',
- unit: 'MB',
- metric_id: 12,
- prometheus_endpoint_path: 'http://test',
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- {
- title: 'Memory Usage (Pod average)',
- type: 'line-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 14,
- },
- ],
- },
- {
- title: 'memories',
- type: 'area-chart',
- y_label: 'memories',
- metrics: [
- {
- id: 'metric_of_ages_1000',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 20,
- },
- {
- id: 'metric_of_ages_1001',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 21,
- },
- {
- id: 'metric_of_ages_1002',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 22,
- },
- {
- id: 'metric_of_ages_1003',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 23,
- },
- {
- id: 'metric_of_ages_1004',
- label: 'memory_1004',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 24,
- },
- ],
- },
- ],
- },
- {
- group: 'Response metrics (NGINX Ingress VTS)',
- priority: 10,
- panels: [
- {
- metrics: [
- {
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- label: 'Status Code',
- metric_id: 1,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
- query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
- unit: 'req / sec',
- },
- ],
- title: 'Throughput',
- type: 'area-chart',
- weight: 1,
- y_label: 'Requests / Sec',
- },
- ],
- },
- ],
-};
-
-/**
- * Mock of response of metrics_dashboard.json
- */
-export const metricsDashboardResponse = {
- all_dashboards: [],
- dashboard: metricsDashboardPayload,
- metrics_data: {},
- status: 'success',
-};
-
-export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
-
-const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
- default: false,
- display_name: `Custom Dashboard ${idx}`,
- can_edit: true,
- system_dashboard: false,
- project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
- path: `.gitlab/dashboards/dashboard_${idx}.yml`,
-}));
-
export const dashboardGitResponse = [
{
default: true,
@@ -548,11 +327,19 @@ export const dashboardGitResponse = [
...customDashboardsData,
];
-export const mockDashboardsErrorResponse = {
- all_dashboards: customDashboardsData,
- message: "Each 'panel_group' must define an array :panels",
- status: 'error',
-};
+// Metrics mocks
+
+export const metricsResult = [
+ {
+ metric: {},
+ values: [
+ [1563272065.589, '10.396484375'],
+ [1563272125.589, '10.333984375'],
+ [1563272185.589, '10.333984375'],
+ [1563272245.589, '10.333984375'],
+ ],
+ },
+];
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
@@ -578,29 +365,6 @@ export const graphDataPrometheusQuery = {
],
};
-export const graphDataPrometheusQueryRange = {
- title: 'Super Chart A1',
- type: 'area-chart',
- weight: 2,
- metrics: [
- {
- metricId: '2_metric_a',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
- unit: 'MB',
- label: 'Total Consumption',
- prometheus_endpoint_path:
- '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
- result: [
- {
- metric: {},
- values: [[1495700554.925, '8.0390625'], [1495700614.925, '8.0390625']],
- },
- ],
- },
- ],
-};
-
export const graphDataPrometheusQueryRangeMultiTrack = {
title: 'Super Chart A3',
type: 'heatmap',
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index c34a5afceb0..f312aa1fd34 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -23,7 +23,11 @@ import {
setGettingStartedEmptyState,
duplicateSystemDashboard,
} from '~/monitoring/stores/actions';
-import { gqClient, parseEnvironmentsResponse } from '~/monitoring/stores/utils';
+import {
+ gqClient,
+ parseEnvironmentsResponse,
+ parseAnnotationsResponse,
+} from '~/monitoring/stores/utils';
import getEnvironments from '~/monitoring/queries/getEnvironments.query.graphql';
import getAnnotations from '~/monitoring/queries/getAnnotations.query.graphql';
import storeState from '~/monitoring/stores/state';
@@ -31,11 +35,14 @@ import {
deploymentData,
environmentData,
annotationsData,
- metricsDashboardResponse,
- metricsDashboardViewModel,
dashboardGitResponse,
mockDashboardsErrorResponse,
} from '../mock_data';
+import {
+ metricsDashboardResponse,
+ metricsDashboardViewModel,
+ metricsDashboardPanelCount,
+} from '../fixture_data';
jest.mock('~/flash');
@@ -221,6 +228,10 @@ describe('Monitoring store actions', () => {
describe('fetchAnnotations', () => {
const { state } = store;
+ state.timeRange = {
+ start: '2020-04-15T12:54:32.137Z',
+ end: '2020-08-15T12:54:32.137Z',
+ };
state.projectPath = 'gitlab-org/gitlab-test';
state.currentEnvironmentName = 'production';
state.currentDashboard = '.gitlab/dashboards/custom_dashboard.yml';
@@ -236,17 +247,25 @@ describe('Monitoring store actions', () => {
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
- dashboardId: state.currentDashboard,
+ dashboardPath: state.currentDashboard,
+ startingFrom: state.timeRange.start,
},
};
+ const parsedResponse = parseAnnotationsResponse(annotationsData);
mockMutate.mockResolvedValue({
data: {
project: {
- environment: {
- metricDashboard: {
- annotations: annotationsData,
- },
+ environments: {
+ nodes: [
+ {
+ metricsDashboard: {
+ annotations: {
+ nodes: parsedResponse,
+ },
+ },
+ },
+ ],
},
},
},
@@ -257,10 +276,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [
- { type: 'requestAnnotations' },
- { type: 'receiveAnnotationsSuccess', payload: annotationsData },
- ],
+ [{ type: 'receiveAnnotationsSuccess', payload: parsedResponse }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -274,7 +290,8 @@ describe('Monitoring store actions', () => {
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
- dashboardId: state.currentDashboard,
+ dashboardPath: state.currentDashboard,
+ startingFrom: state.timeRange.start,
},
};
@@ -285,7 +302,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [{ type: 'requestAnnotations' }, { type: 'receiveAnnotationsFailure' }],
+ [{ type: 'receiveAnnotationsFailure' }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -553,7 +570,7 @@ describe('Monitoring store actions', () => {
fetchDashboardData({ state, commit, dispatch })
.then(() => {
- expect(dispatch).toHaveBeenCalledTimes(10); // one per metric plus 1 for deployments
+ expect(dispatch).toHaveBeenCalledTimes(metricsDashboardPanelCount + 1); // plus 1 for deployments
expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
@@ -581,11 +598,13 @@ describe('Monitoring store actions', () => {
let metric;
let state;
let data;
+ let prometheusEndpointPath;
beforeEach(() => {
state = storeState();
- [metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
- metric = convertObjectPropsToCamelCase(metric, { deep: true });
+ [metric] = metricsDashboardViewModel.panelGroups[0].panels[0].metrics;
+
+ prometheusEndpointPath = metric.prometheusEndpointPath;
data = {
metricId: metric.metricId,
@@ -594,7 +613,7 @@ describe('Monitoring store actions', () => {
});
it('commits result', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -631,7 +650,7 @@ describe('Monitoring store actions', () => {
};
it('uses calculated step', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -673,7 +692,7 @@ describe('Monitoring store actions', () => {
};
it('uses metric step', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -705,10 +724,10 @@ describe('Monitoring store actions', () => {
it('commits result, when waiting for results', done => {
// Mock multiple attempts while the cache is filling up
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').reply(200, { data }); // 4th attempt
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // 4th attempt
testAction(
fetchPrometheusMetric,
@@ -739,10 +758,10 @@ describe('Monitoring store actions', () => {
it('commits failure, when waiting for results and getting a server error', done => {
// Mock multiple attempts while the cache is filling up and fails
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').reply(500); // 4th attempt
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).reply(500); // 4th attempt
const error = new Error('Request failed with status code 500');
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 40341d32cf5..f040876b832 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -3,18 +3,13 @@ import * as getters from '~/monitoring/stores/getters';
import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import { metricStates } from '~/monitoring/constants';
+import { environmentData, metricsResult } from '../mock_data';
import {
- environmentData,
- mockedEmptyThroughputResult,
- mockedQueryResultFixture,
- mockedQueryResultFixtureStatusCode,
-} from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+ metricsDashboardPayload,
+ metricResultStatus,
+ metricResultPods,
+ metricResultEmpty,
+} from '../fixture_data';
describe('Monitoring store Getters', () => {
describe('getMetricStates', () => {
@@ -22,6 +17,21 @@ describe('Monitoring store Getters', () => {
let state;
let getMetricStates;
+ const setMetricSuccess = ({ result = metricsResult, group = 0, panel = 0, metric = 0 }) => {
+ const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
+ metricId,
+ result,
+ });
+ };
+
+ const setMetricFailure = ({ group = 0, panel = 0, metric = 0 }) => {
+ const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+ mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
+ metricId,
+ });
+ };
+
beforeEach(() => {
setupState = (initState = {}) => {
state = initState;
@@ -61,31 +71,30 @@ describe('Monitoring store Getters', () => {
it('on an empty metric with no result, returns NO_DATA', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+ setMetricSuccess({ result: [], group: 2 });
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
it('on a metric with a result, returns OK', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ setMetricSuccess({ group: 1 });
expect(getMetricStates()).toEqual([metricStates.OK]);
});
it('on a metric with an error, returns an error', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({});
expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
});
it('on multiple metrics with results, returns OK', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+
+ setMetricSuccess({ group: 1 });
+ setMetricSuccess({ group: 1, panel: 1 });
expect(getMetricStates()).toEqual([metricStates.OK]);
@@ -96,15 +105,8 @@ describe('Monitoring store Getters', () => {
it('on multiple metrics errors', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({});
+ setMetricFailure({ group: 1 });
// Entire dashboard fails
expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
@@ -116,14 +118,11 @@ describe('Monitoring store Getters', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
// A success in 1 group
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ setMetricSuccess({ group: 1 });
+
// An error in 2 groups
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[1].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[2].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({ group: 1, panel: 1 });
+ setMetricFailure({ group: 2, panel: 0 });
expect(getMetricStates()).toEqual([metricStates.OK, metricStates.UNKNOWN_ERROR]);
expect(getMetricStates(groups[1].key)).toEqual([
@@ -182,38 +181,35 @@ describe('Monitoring store Getters', () => {
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultEmpty);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
- expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
+ expect(metricsWithData()).toEqual([metricResultStatus.metricId]);
});
it('multiple metrics with results, it returns multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
- expect(metricsWithData()).toEqual([
- mockedQueryResultFixture.metricId,
- mockedQueryResultFixtureStatusCode.metricId,
- ]);
+ expect(metricsWithData()).toEqual([metricResultStatus.metricId, metricResultPods.metricId]);
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
// First group has metrics
expect(metricsWithData(state.dashboard.panelGroups[1].key)).toEqual([
- mockedQueryResultFixture.metricId,
- mockedQueryResultFixtureStatusCode.metricId,
+ metricResultStatus.metricId,
+ metricResultPods.metricId,
]);
// Second group has no metrics
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 34d224e13b0..1452e9bc491 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -6,12 +6,7 @@ import state from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
import { deploymentData, dashboardGitResponse } from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { metricsDashboardPayload } from '../fixture_data';
describe('Monitoring mutations', () => {
let stateCopy;
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index f46409e8e32..7ee2a16b4bd 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -2,9 +2,11 @@ import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import {
uniqMetricsId,
parseEnvironmentsResponse,
+ parseAnnotationsResponse,
removeLeadingSlash,
mapToDashboardViewModel,
} from '~/monitoring/stores/utils';
+import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
const projectPath = 'gitlab-org/gitlab-test';
@@ -56,7 +58,7 @@ describe('mapToDashboardViewModel', () => {
y_label: 'Y Label A',
yAxis: {
name: 'Y Label A',
- format: 'number',
+ format: 'engineering',
precision: 2,
},
metrics: [],
@@ -138,7 +140,7 @@ describe('mapToDashboardViewModel', () => {
y_label: '',
yAxis: {
name: '',
- format: SUPPORTED_FORMATS.number,
+ format: SUPPORTED_FORMATS.engineering,
precision: 2,
},
metrics: [],
@@ -159,7 +161,7 @@ describe('mapToDashboardViewModel', () => {
},
yAxis: {
name: '',
- format: SUPPORTED_FORMATS.number,
+ format: SUPPORTED_FORMATS.engineering,
precision: 2,
},
metrics: [],
@@ -219,7 +221,7 @@ describe('mapToDashboardViewModel', () => {
},
});
- expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.number);
+ expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.engineering);
});
// This property allows single_stat panels to render percentile values
@@ -376,6 +378,27 @@ describe('parseEnvironmentsResponse', () => {
});
});
+describe('parseAnnotationsResponse', () => {
+ const parsedAnnotationResponse = [
+ {
+ description: 'This is a test annotation',
+ endingAt: null,
+ id: 'gid://gitlab/Metrics::Dashboard::Annotation/1',
+ panelId: null,
+ startingAt: new Date('2020-04-12T12:51:53.000Z'),
+ },
+ ];
+ it.each`
+ case | input | expected
+ ${'Returns empty array for null input'} | ${null} | ${[]}
+ ${'Returns empty array for undefined input'} | ${undefined} | ${[]}
+ ${'Returns empty array for empty input'} | ${[]} | ${[]}
+ ${'Returns parsed responses for annotations data'} | ${[annotationsData[0]]} | ${parsedAnnotationResponse}
+ `('$case', ({ input, expected }) => {
+ expect(parseAnnotationsResponse(input)).toEqual(expected);
+ });
+});
+
describe('removeLeadingSlash', () => {
[
{ input: null, output: '' },
diff --git a/spec/frontend/monitoring/store_utils.js b/spec/frontend/monitoring/store_utils.js
new file mode 100644
index 00000000000..d764a79ccc3
--- /dev/null
+++ b/spec/frontend/monitoring/store_utils.js
@@ -0,0 +1,34 @@
+import * as types from '~/monitoring/stores/mutation_types';
+import { metricsResult, environmentData } from './mock_data';
+import { metricsDashboardPayload } from './fixture_data';
+
+export const setMetricResult = ({ $store, result, group = 0, panel = 0, metric = 0 }) => {
+ const { dashboard } = $store.state.monitoringDashboard;
+ const { metricId } = dashboard.panelGroups[group].panels[panel].metrics[metric];
+
+ $store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, {
+ metricId,
+ result,
+ });
+};
+
+const setEnvironmentData = $store => {
+ $store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, environmentData);
+};
+
+export const setupStoreWithDashboard = $store => {
+ $store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
+ metricsDashboardPayload,
+ );
+};
+
+export const setupStoreWithData = $store => {
+ setupStoreWithDashboard($store);
+
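+ // Load 3 panels to the dashboard, one with an empty result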
+ setMetricResult({ $store, result: [], panel: 0 });
+ setMetricResult({ $store, result: metricsResult, panel: 1 });
+ setMetricResult({ $store, result: metricsResult, panel: 2 });
+
+ setEnvironmentData($store);
+};
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 262b8b985cc..0bb1b987b2e 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,17 +1,17 @@
import * as monitoringUtils from '~/monitoring/utils';
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
- mockHost,
mockProjectDir,
graphDataPrometheusQuery,
- graphDataPrometheusQueryRange,
anomalyMockGraphData,
barMockData,
} from './mock_data';
+import { graphData } from './fixture_data';
jest.mock('~/lib/utils/url_utility');
-const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
+const mockPath = `${TEST_HOST}${mockProjectDir}/-/environments/29/metrics`;
const generatedLink = 'http://chart.link.com';
@@ -101,10 +101,7 @@ describe('monitoring/utils', () => {
* the validator will look for the `values` key instead of `value`
*/
it('validates data with the query_range format', () => {
- const validGraphData = monitoringUtils.graphDataValidatorForValues(
- false,
- graphDataPrometheusQueryRange,
- );
+ const validGraphData = monitoringUtils.graphDataValidatorForValues(false, graphData);
expect(validGraphData).toBe(true);
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index d3932ca09ff..9c292fa0f2b 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -55,7 +55,12 @@ describe('Settings Panel', () => {
currentSettings: { ...defaultProps.currentSettings, ...currentSettings },
};
- return mountFn(settingsPanel, { propsData });
+ return mountFn(settingsPanel, {
+ propsData,
+ provide: {
+ glFeatures: { metricsDashboardVisibilitySwitchingAvailable: true },
+ },
+ });
};
const overrideCurrentSettings = (currentSettingsProps, extraProps = {}) => {
@@ -471,4 +476,28 @@ describe('Settings Panel', () => {
});
});
});
+
+ describe('Metrics dashboard', () => {
+ it('should show the metrics dashboard access toggle', () => {
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'metrics-visibility-settings' }).exists()).toBe(true);
+ });
+ });
+
+ it('should set the visibility level description based upon the selected visibility level', () => {
+ wrapper
+ .find('[name="project[project_feature_attributes][metrics_dashboard_access_level]"]')
+ .setValue(visibilityOptions.PUBLIC);
+
+ expect(wrapper.vm.metricsAccessLevel).toBe(visibilityOptions.PUBLIC);
+ });
+
+ it('should contain help text', () => {
+ wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
+
+ expect(wrapper.find({ ref: 'metrics-visibility-settings' }).props().helpText).toEqual(
+ 'With Metrics Dashboard you can visualize this project performance metrics',
+ );
+ });
+ });
});
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
index 43da6388efa..3c5938cfa1f 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -7,6 +7,7 @@ import ActionComponent from '~/pipelines/components/graph/action_component.vue';
describe('pipeline graph action component', () => {
let wrapper;
let mock;
+ const findButton = () => wrapper.find('button');
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -44,15 +45,15 @@ describe('pipeline graph action component', () => {
});
it('should render an svg', () => {
- expect(wrapper.find('.ci-action-icon-wrapper')).toBeDefined();
- expect(wrapper.find('svg')).toBeDefined();
+ expect(wrapper.find('.ci-action-icon-wrapper').exists()).toBe(true);
+ expect(wrapper.find('svg').exists()).toBe(true);
});
describe('on click', () => {
it('emits `pipelineActionRequestComplete` after a successful request', done => {
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
waitForPromises()
.then(() => {
@@ -63,7 +64,7 @@ describe('pipeline graph action component', () => {
});
it('renders a loading icon while waiting for request', done => {
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
wrapper.vm.$nextTick(() => {
expect(wrapper.find('.js-action-icon-loading').exists()).toBe(true);
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
new file mode 100644
index 00000000000..a9b06eab3fa
--- /dev/null
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -0,0 +1,305 @@
+import Vue from 'vue';
+import { mount } from '@vue/test-utils';
+import PipelineStore from '~/pipelines/stores/pipeline_store';
+import graphComponent from '~/pipelines/components/graph/graph_component.vue';
+import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import linkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
+import graphJSON from './mock_data';
+import linkedPipelineJSON from './linked_pipelines_mock_data';
+import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
+
+describe('graph component', () => {
+ const store = new PipelineStore();
+ store.storePipeline(linkedPipelineJSON);
+ const mediator = new PipelinesMediator({ endpoint: '' });
+
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('while it is loading', () => {
+ it('should render a loading icon', () => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: true,
+ pipeline: {},
+ mediator,
+ },
+ });
+
+ expect(wrapper.find('.gl-spinner').exists()).toBe(true);
+ });
+ });
+
+ describe('with data', () => {
+ it('should render the graph', () => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: graphJSON,
+ mediator,
+ },
+ });
+
+ expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
+
+ expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
+
+ expect(
+ wrapper
+ .findAll(stageColumnComponent)
+ .at(1)
+ .classes(),
+ ).toContain('left-margin');
+
+ expect(wrapper.find('.stage-column:nth-child(2) .build:nth-child(1)').classes()).toContain(
+ 'left-connector',
+ );
+
+ expect(wrapper.find('.loading-icon').exists()).toBe(false);
+
+ expect(wrapper.find('.stage-column-list').exists()).toBe(true);
+ });
+ });
+
+ describe('when linked pipelines are present', () => {
+ beforeEach(() => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+ });
+
+ describe('rendered output', () => {
+ it('should include the pipelines graph', () => {
+ expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
+ });
+
+ it('should not include the loading icon', () => {
+ expect(wrapper.find('.fa-spinner').exists()).toBe(false);
+ });
+
+ it('should include the stage column list', () => {
+ expect(wrapper.find(stageColumnComponent).exists()).toBe(true);
+ });
+
+ it('should include the no-margin class on the first child if there is only one job', () => {
+ const firstStageColumnElement = wrapper.find(stageColumnComponent);
+
+ expect(firstStageColumnElement.classes()).toContain('no-margin');
+ });
+
+ it('should include the has-only-one-job class on the first child', () => {
+ const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column');
+
+ expect(firstStageColumnElement.classes()).toContain('has-only-one-job');
+ });
+
+ it('should include the left-margin class on the second child', () => {
+ const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column:last-child');
+
+ expect(firstStageColumnElement.classes()).toContain('left-margin');
+ });
+
+ it('should include the js-has-linked-pipelines flag', () => {
+ expect(wrapper.find('.js-has-linked-pipelines').exists()).toBe(true);
+ });
+ });
+
+ describe('computeds and methods', () => {
+ describe('capitalizeStageName', () => {
+ it('capitalizes the stage name', () => {
+ expect(
+ wrapper
+ .findAll('.stage-column .stage-name')
+ .at(1)
+ .text(),
+ ).toBe('Prebuild');
+ });
+ });
+
+ describe('stageConnectorClass', () => {
+ it('returns left-margin when there is a triggerer', () => {
+ expect(
+ wrapper
+ .findAll(stageColumnComponent)
+ .at(1)
+ .classes(),
+ ).toContain('left-margin');
+ });
+ });
+ });
+
+ describe('linked pipelines components', () => {
+ beforeEach(() => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+ });
+
+ it('should render an upstream pipelines column at first position', () => {
+ expect(wrapper.find(linkedPipelinesColumn).exists()).toBe(true);
+ expect(wrapper.find('.stage-column .stage-name').text()).toBe('Upstream');
+ });
+
+ it('should render a downstream pipelines column at last position', () => {
+ const stageColumnNames = wrapper.findAll('.stage-column .stage-name');
+
+ expect(wrapper.find(linkedPipelinesColumn).exists()).toBe(true);
+ expect(stageColumnNames.at(stageColumnNames.length - 1).text()).toBe('Downstream');
+ });
+
+ describe('triggered by', () => {
+ describe('on click', () => {
+ it('should emit `onClickTriggeredBy` when triggered by linked pipeline is clicked', () => {
+ const btnWrapper = wrapper.find('.linked-pipeline-content');
+
+ btnWrapper.trigger('click');
+
+ btnWrapper.vm.$nextTick(() => {
+ expect(wrapper.emitted().onClickTriggeredBy).toEqual([
+ store.state.pipeline.triggered_by,
+ ]);
+ });
+ });
+ });
+
+ describe('with expanded pipeline', () => {
+ it('should render expanded pipeline', done => {
+ // expand the pipeline
+ store.state.pipeline.triggered_by[0].isExpanded = true;
+
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+
+ Vue.nextTick()
+ .then(() => {
+ expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('triggered', () => {
+ describe('on click', () => {
+ it('should emit `onClickTriggered`', () => {
+ // We have to mock this method since we both change the style and
+ // emit an event; not mocking it returns an error.
+ wrapper.setMethods({
+ handleClickedDownstream: jest.fn(() =>
+ wrapper.vm.$emit('onClickTriggered', ...store.state.pipeline.triggered),
+ ),
+ });
+
+ const btnWrappers = wrapper.findAll('.linked-pipeline-content');
+ const downstreamBtnWrapper = btnWrappers.at(btnWrappers.length - 1);
+
+ downstreamBtnWrapper.trigger('click');
+
+ downstreamBtnWrapper.vm.$nextTick(() => {
+ expect(wrapper.emitted().onClickTriggered).toEqual([store.state.pipeline.triggered]);
+ });
+ });
+ });
+
+ describe('with expanded pipeline', () => {
+ it('should render expanded pipeline', done => {
+ // expand the pipeline
+ store.state.pipeline.triggered[0].isExpanded = true;
+
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+
+ Vue.nextTick()
+ .then(() => {
+ expect(wrapper.find('.js-downstream-pipeline-34993051').exists()).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+ });
+ });
+
+ describe('when linked pipelines are not present', () => {
+ beforeEach(() => {
+ const pipeline = Object.assign(linkedPipelineJSON, { triggered: null, triggered_by: null });
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline,
+ mediator,
+ },
+ });
+ });
+
+ describe('rendered output', () => {
+ it('should include the no-margin class on the first column', () => {
+ const firstColumn = wrapper.find('.stage-column');
+
+ expect(firstColumn.classes()).toContain('no-margin');
+ });
+
+ it('should not render a linked pipelines column', () => {
+ expect(wrapper.find('.linked-pipelines-column').exists()).toBe(false);
+ });
+ });
+
+ describe('stageConnectorClass', () => {
+ it('returns no-margin when there is no triggerer and only one job', () => {
+ expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
+ });
+
+ it('returns left-margin when there is no triggerer and it is not the first stage', () => {
+ expect(
+ wrapper
+ .findAll(stageColumnComponent)
+ .at(1)
+ .classes(),
+ ).toContain('left-margin');
+ });
+ });
+ });
+
+ describe('capitalizeStageName', () => {
+ it('capitalizes and escapes stage name', () => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: graphJSON,
+ mediator,
+ },
+ });
+
+ expect(
+ wrapper
+ .find('.stage-column:nth-child(2) .stage-name')
+ .text()
+ .trim(),
+ ).toEqual('Deploy &lt;img src=x onerror=alert(document.domain)&gt;');
+ });
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/job_group_dropdown_spec.js b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
index a3957f94caa..b323e1d8a06 100644
--- a/spec/javascripts/pipelines/graph/job_group_dropdown_spec.js
+++ b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
@@ -1,11 +1,7 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
import JobGroupDropdown from '~/pipelines/components/graph/job_group_dropdown.vue';
describe('job group dropdown component', () => {
- const Component = Vue.extend(JobGroupDropdown);
- let vm;
-
const group = {
jobs: [
{
@@ -66,20 +62,23 @@ describe('job group dropdown component', () => {
},
};
+ let wrapper;
+ const findButton = () => wrapper.find('button');
+
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
beforeEach(() => {
- vm = mountComponent(Component, { group });
+ wrapper = shallowMount(JobGroupDropdown, { propsData: { group } });
});
it('renders button with group name and size', () => {
- expect(vm.$el.querySelector('button').textContent).toContain(group.name);
- expect(vm.$el.querySelector('button').textContent).toContain(group.size);
+ expect(findButton().text()).toContain(group.name);
+ expect(findButton().text()).toContain(group.size);
});
it('renders dropdown with jobs', () => {
- expect(vm.$el.querySelectorAll('.scrollable-menu>ul>li').length).toEqual(group.jobs.length);
+ expect(wrapper.findAll('.scrollable-menu>ul>li').length).toBe(group.jobs.length);
});
});
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index 0c64d5c9fa8..da777466e3e 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -47,7 +47,7 @@ describe('pipeline graph job item', () => {
expect(link.attributes('title')).toEqual(`${mockJob.name} - ${mockJob.status.label}`);
- expect(wrapper.find('.js-status-icon-success')).toBeDefined();
+ expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
expect(trimText(wrapper.find('.ci-status-text').text())).toBe(mockJob.name);
@@ -73,7 +73,7 @@ describe('pipeline graph job item', () => {
},
});
- expect(wrapper.find('.js-status-icon-success')).toBeDefined();
+ expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
expect(wrapper.find('a').exists()).toBe(false);
expect(trimText(wrapper.find('.ci-status-text').text())).toEqual(mockJob.name);
@@ -84,8 +84,8 @@ describe('pipeline graph job item', () => {
it('should render the action icon', () => {
createWrapper({ job: mockJob });
- expect(wrapper.find('a.ci-action-icon-container')).toBeDefined();
- expect(wrapper.find('i.ci-action-icon-wrapper')).toBeDefined();
+ expect(wrapper.find('.ci-action-icon-container').exists()).toBe(true);
+ expect(wrapper.find('.ci-action-icon-wrapper').exists()).toBe(true);
});
});
diff --git a/spec/frontend/pipelines/graph/job_name_component_spec.js b/spec/frontend/pipelines/graph/job_name_component_spec.js
new file mode 100644
index 00000000000..3574b66403e
--- /dev/null
+++ b/spec/frontend/pipelines/graph/job_name_component_spec.js
@@ -0,0 +1,36 @@
+import { mount } from '@vue/test-utils';
+import ciIcon from '~/vue_shared/components/ci_icon.vue';
+
+import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue';
+
+describe('job name component', () => {
+ let wrapper;
+
+ const propsData = {
+ name: 'foo',
+ status: {
+ icon: 'status_success',
+ group: 'success',
+ },
+ };
+
+ beforeEach(() => {
+ wrapper = mount(jobNameComponent, {
+ propsData,
+ });
+ });
+
+ it('should render the provided name', () => {
+ expect(
+ wrapper
+ .find('.ci-status-text')
+ .text()
+ .trim(),
+ ).toBe(propsData.name);
+ });
+
+ it('should render an icon with the provided status', () => {
+ expect(wrapper.find(ciIcon).exists()).toBe(true);
+ expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index 7f49b21100d..cf78aa3ef71 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -1,12 +1,17 @@
import { mount } from '@vue/test-utils';
import LinkedPipelineComponent from '~/pipelines/components/graph/linked_pipeline.vue';
+import CiStatus from '~/vue_shared/components/ci_icon.vue';
import mockData from './linked_pipelines_mock_data';
const mockPipeline = mockData.triggered[0];
+const validTriggeredPipelineId = mockPipeline.project.id;
+const invalidTriggeredPipelineId = mockPipeline.project.id + 5;
+
describe('Linked pipeline', () => {
let wrapper;
+ const findButton = () => wrapper.find('button');
const createWrapper = propsData => {
wrapper = mount(LinkedPipelineComponent, {
@@ -21,7 +26,7 @@ describe('Linked pipeline', () => {
describe('rendered output', () => {
const props = {
pipeline: mockPipeline,
- projectId: 20,
+ projectId: invalidTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -44,14 +49,13 @@ describe('Linked pipeline', () => {
});
it('should render an svg within the status container', () => {
- const pipelineStatusElement = wrapper.find('.js-linked-pipeline-status');
+ const pipelineStatusElement = wrapper.find(CiStatus);
expect(pipelineStatusElement.find('svg').exists()).toBe(true);
});
it('should render the pipeline status icon svg', () => {
- expect(wrapper.find('.js-ci-status-icon-running').exists()).toBe(true);
- expect(wrapper.find('.js-ci-status-icon-running').html()).toContain('<svg');
+ expect(wrapper.find('.ci-status-icon-failed svg').exists()).toBe(true);
});
it('should have a ci-status child component', () => {
@@ -88,7 +92,7 @@ describe('Linked pipeline', () => {
describe('parent/child', () => {
const downstreamProps = {
pipeline: mockPipeline,
- projectId: 19,
+ projectId: validTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -116,7 +120,7 @@ describe('Linked pipeline', () => {
describe('when isLoading is true', () => {
const props = {
pipeline: { ...mockPipeline, isLoading: true },
- projectId: 19,
+ projectId: invalidTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -132,7 +136,7 @@ describe('Linked pipeline', () => {
describe('on click', () => {
const props = {
pipeline: mockPipeline,
- projectId: 19,
+ projectId: validTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -142,18 +146,18 @@ describe('Linked pipeline', () => {
it('emits `pipelineClicked` event', () => {
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
expect(wrapper.emitted().pipelineClicked).toBeTruthy();
});
it('should emit `bv::hide::tooltip` to close the tooltip', () => {
jest.spyOn(wrapper.vm.$root, '$emit');
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
expect(wrapper.vm.$root.$emit.mock.calls[0]).toEqual([
'bv::hide::tooltip',
- 'js-linked-pipeline-132',
+ 'js-linked-pipeline-34993051',
]);
});
});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
new file mode 100644
index 00000000000..82eaa553d0c
--- /dev/null
+++ b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
@@ -0,0 +1,38 @@
+import { shallowMount } from '@vue/test-utils';
+import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
+import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
+import mockData from './linked_pipelines_mock_data';
+
+describe('Linked Pipelines Column', () => {
+ const propsData = {
+ columnTitle: 'Upstream',
+ linkedPipelines: mockData.triggered,
+ graphPosition: 'right',
+ projectId: 19,
+ };
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(LinkedPipelinesColumn, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the pipeline orientation', () => {
+ const titleElement = wrapper.find('.linked-pipelines-column-title');
+
+ expect(titleElement.text()).toBe(propsData.columnTitle);
+ });
+
+ it('renders the correct number of linked pipelines', () => {
+ const linkedPipelineElements = wrapper.findAll(LinkedPipeline);
+
+ expect(linkedPipelineElements.length).toBe(propsData.linkedPipelines.length);
+ });
+
+ it('renders cross project triangle when column is upstream', () => {
+ expect(wrapper.find('.cross-project-triangle').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
index c9a94b3101f..3e9c0814403 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
@@ -1,411 +1,3779 @@
export default {
- project: {
- id: 19,
+ id: 23211253,
+ user: {
+ id: 3585,
+ name: 'Achilleas Pipinellis',
+ username: 'axil',
+ state: 'active',
+ avatar_url: 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/3585/avatar.png',
+ web_url: 'https://gitlab.com/axil',
+ status_tooltip_html:
+ '\u003cspan class="user-status-emoji has-tooltip" title="I like pizza" data-html="true" data-placement="top"\u003e\u003cgl-emoji title="slice of pizza" data-name="pizza" data-unicode-version="6.0"\u003e🍕\u003c/gl-emoji\u003e\u003c/span\u003e',
+ path: '/axil',
},
- triggered_by: {
- id: 129,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/129',
- project: {
- name: 'GitLabCE',
- },
- details: {
- status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
- has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/129',
- favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
- },
- },
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: '7-5-stable',
- path: '/gitlab-org/gitlab-foss/commits/7-5-stable',
- tag: false,
- branch: true,
- },
- commit: {
- id: '23433d4d8b20d7e45c103d0b6048faad38a130ab',
- short_id: '23433d4d',
- title: 'Version 7.5.0.rc1',
- created_at: '2014-11-17T15:44:14.000+01:00',
- parent_ids: ['30ac909f30f58d319b42ed1537664483894b18cd'],
- message: 'Version 7.5.0.rc1\n',
- author_name: 'Jacob Vosmaer',
- author_email: 'contact@jacobvosmaer.nl',
- authored_date: '2014-11-17T15:44:14.000+01:00',
- committer_name: 'Jacob Vosmaer',
- committer_email: 'contact@jacobvosmaer.nl',
- committed_date: '2014-11-17T15:44:14.000+01:00',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/e66d11c0eedf8c07b3b18fca46599807?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/23433d4d8b20d7e45c103d0b6048faad38a130ab',
- commit_path: '/gitlab-org/gitlab-foss/commit/23433d4d8b20d7e45c103d0b6048faad38a130ab',
- },
- retry_path: '/gitlab-org/gitlab-foss/pipelines/129/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/129/cancel',
- created_at: '2017-05-24T14:46:20.090Z',
- updated_at: '2017-05-24T14:46:29.906Z',
+ active: false,
+ coverage: null,
+ source: 'push',
+ created_at: '2018-06-05T11:31:30.452Z',
+ updated_at: '2018-10-31T16:35:31.305Z',
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253',
+ flags: {
+ latest: false,
+ stuck: false,
+ auto_devops: false,
+ merge_request: false,
+ yaml_errors: false,
+ retryable: false,
+ cancelable: false,
+ failure_reason: false,
},
- triggered: [
- {
- id: 132,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/132',
- project: {
- name: 'GitLabCE',
- id: 19,
- },
- details: {
+ details: {
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ duration: 53,
+ finished_at: '2018-10-31T16:35:31.299Z',
+ stages: [
+ {
+ name: 'prebuild',
+ title: 'prebuild: passed',
+ groups: [
+ {
+ name: 'review-docs-deploy',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469032',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 72469032,
+ name: 'review-docs-deploy',
+ started: '2018-10-31T16:34:58.778Z',
+ archived: false,
+ build_path: '/gitlab-org/gitlab-runner/-/jobs/72469032',
+ retry_path: '/gitlab-org/gitlab-runner/-/jobs/72469032/retry',
+ play_path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-06-05T11:31:30.495Z',
+ updated_at: '2018-10-31T16:35:31.251Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469032',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/132',
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253#prebuild',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
},
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253#prebuild',
+ dropdown_path: '/gitlab-org/gitlab-runner/pipelines/23211253/stage.json?stage=prebuild',
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- short_id: 'b9d58c4c',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-03T12:50:33.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n\nchangelog updated to include ssh key retrieval feature update\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-03T12:50:33.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- commit_path: '/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- },
- retry_path: '/gitlab-org/gitlab-foss/pipelines/132/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/132/cancel',
- created_at: '2017-05-24T14:46:24.644Z',
- updated_at: '2017-05-24T14:48:55.226Z',
- },
- {
- id: 133,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/133',
- project: {
- name: 'GitLabCE',
- },
- details: {
+ {
+ name: 'test',
+ title: 'test: passed',
+ groups: [
+ {
+ name: 'docs check links',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469033',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469033/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 72469033,
+ name: 'docs check links',
+ started: '2018-06-05T11:31:33.240Z',
+ archived: false,
+ build_path: '/gitlab-org/gitlab-runner/-/jobs/72469033',
+ retry_path: '/gitlab-org/gitlab-runner/-/jobs/72469033/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-06-05T11:31:30.627Z',
+ updated_at: '2018-06-05T11:31:54.363Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469033',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469033/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ ],
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/133',
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253#test',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
},
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253#test',
+ dropdown_path: '/gitlab-org/gitlab-runner/pipelines/23211253/stage.json?stage=test',
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- short_id: 'b6bd4856',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-02T20:39:29.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-02T20:39:29.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- commit_path: '/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- },
- retry_path: '/gitlab-org/gitlab-foss/pipelines/133/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/133/cancel',
- created_at: '2017-05-24T14:46:24.648Z',
- updated_at: '2017-05-24T14:48:59.673Z',
- },
- {
- id: 130,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/130',
- project: {
- name: 'GitLabCE',
- },
- details: {
+ {
+ name: 'cleanup',
+ title: 'cleanup: skipped',
+ groups: [
+ {
+ name: 'review-docs-cleanup',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual stop action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469034',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'stop',
+ title: 'Stop',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ method: 'post',
+ button_title: 'Stop this environment',
+ },
+ },
+ jobs: [
+ {
+ id: 72469034,
+ name: 'review-docs-cleanup',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-org/gitlab-runner/-/jobs/72469034',
+ play_path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-06-05T11:31:30.760Z',
+ updated_at: '2018-06-05T11:31:56.037Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual stop action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469034',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'stop',
+ title: 'Stop',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ method: 'post',
+ button_title: 'Stop this environment',
+ },
+ },
+ },
+ ],
+ },
+ ],
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/130',
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253#cleanup',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
},
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253#cleanup',
+ dropdown_path: '/gitlab-org/gitlab-runner/pipelines/23211253/stage.json?stage=cleanup',
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'review-docs-cleanup',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ playable: true,
+ scheduled: false,
},
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
+ {
+ name: 'review-docs-deploy',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ playable: true,
+ scheduled: false,
},
- commit: {
- id: '6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- short_id: '6d7ced4a',
- title: 'Whitespace fixes to patch',
- created_at: '2013-10-08T13:53:22.000-05:00',
- parent_ids: ['1875141a963a4238bda29011d8f7105839485253'],
- message: 'Whitespace fixes to patch\n',
- author_name: 'Dale Hamel',
- author_email: 'dale.hamel@srvthe.net',
- authored_date: '2013-10-08T13:53:22.000-05:00',
- committer_name: 'Dale Hamel',
- committer_email: 'dale.hamel@invenia.ca',
- committed_date: '2013-10-08T13:53:22.000-05:00',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/cd08930e69fa5ad1a669206e7bafe476?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- commit_path: '/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
+ ],
+ scheduled_actions: [],
+ },
+ ref: {
+ name: 'docs/add-development-guide-to-readme',
+ path: '/gitlab-org/gitlab-runner/commits/docs/add-development-guide-to-readme',
+ tag: false,
+ branch: true,
+ merge_request: false,
+ },
+ commit: {
+ id: '8083eb0a920572214d0dccedd7981f05d535ad46',
+ short_id: '8083eb0a',
+ title: 'Add link to development guide in readme',
+ created_at: '2018-06-05T11:30:48.000Z',
+ parent_ids: ['1d7cf79b5a1a2121b9474ac20d61c1b8f621289d'],
+ message:
+ 'Add link to development guide in readme\n\nCloses https://gitlab.com/gitlab-org/gitlab-runner/issues/3122\n',
+ author_name: 'Achilleas Pipinellis',
+ author_email: 'axil@gitlab.com',
+ authored_date: '2018-06-05T11:30:48.000Z',
+ committer_name: 'Achilleas Pipinellis',
+ committer_email: 'axil@gitlab.com',
+ committed_date: '2018-06-05T11:30:48.000Z',
+ author: {
+ id: 3585,
+ name: 'Achilleas Pipinellis',
+ username: 'axil',
+ state: 'active',
+ avatar_url: 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/3585/avatar.png',
+ web_url: 'https://gitlab.com/axil',
+ status_tooltip_html: null,
+ path: '/axil',
+ },
+ author_gravatar_url:
+ 'https://secure.gravatar.com/avatar/1d37af00eec153a8333a4ce18e9aea41?s=80\u0026d=identicon',
+ commit_url:
+ 'https://gitlab.com/gitlab-org/gitlab-runner/commit/8083eb0a920572214d0dccedd7981f05d535ad46',
+ commit_path: '/gitlab-org/gitlab-runner/commit/8083eb0a920572214d0dccedd7981f05d535ad46',
+ },
+ project: { id: 20 },
+ triggered_by: {
+ id: 12,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url: 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
+ },
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ details: {
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/130/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/130/cancel',
- created_at: '2017-05-24T14:46:24.630Z',
- updated_at: '2017-05-24T14:49:45.091Z',
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11421321982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 1149822131854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11498285523424,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 1149846949786,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 11498282342357,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- {
- id: 131,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/132',
- project: {
- name: 'GitLabCE',
+ project: {
+ id: 20,
+ name: 'Test',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
+ },
+ triggered_by: {
+ id: 349932310342451,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url:
+ 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
},
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
details: {
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/132',
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11421321982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 1149822131854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11498285523424,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path:
+ '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 1149846949786,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 11498282342357,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- short_id: 'b9d58c4c',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-03T12:50:33.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n\nchangelog updated to include ssh key retrieval feature update\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-03T12:50:33.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- commit_path: '/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/132/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/132/cancel',
- created_at: '2017-05-24T14:46:24.644Z',
- updated_at: '2017-05-24T14:48:55.226Z',
},
+ triggered: [],
+ },
+ triggered: [
{
- id: 134,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/133',
- project: {
- name: 'GitLabCE',
+ id: 34993051,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url:
+ 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
},
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
details: {
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/133',
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982855,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path:
+ '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 114984694,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982857,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- short_id: 'b6bd4856',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-02T20:39:29.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-02T20:39:29.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- commit_path: '/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/133/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/133/cancel',
- created_at: '2017-05-24T14:46:24.648Z',
- updated_at: '2017-05-24T14:48:59.673Z',
},
{
- id: 135,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/130',
- project: {
- name: 'GitLabCE',
+ id: 34993052,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url:
+ 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
},
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
details: {
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/130',
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 1224982855,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path:
+ '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 1123984694,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 1143232982857,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114921313182858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: '6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- short_id: '6d7ced4a',
- title: 'Whitespace fixes to patch',
- created_at: '2013-10-08T13:53:22.000-05:00',
- parent_ids: ['1875141a963a4238bda29011d8f7105839485253'],
- message: 'Whitespace fixes to patch\n',
- author_name: 'Dale Hamel',
- author_email: 'dale.hamel@srvthe.net',
- authored_date: '2013-10-08T13:53:22.000-05:00',
- committer_name: 'Dale Hamel',
- committer_email: 'dale.hamel@invenia.ca',
- committed_date: '2013-10-08T13:53:22.000-05:00',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/cd08930e69fa5ad1a669206e7bafe476?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- commit_path: '/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/130/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/130/cancel',
- created_at: '2017-05-24T14:46:24.630Z',
- updated_at: '2017-05-24T14:49:45.091Z',
+ triggered: [
+ {
+ id: 26,
+ user: null,
+ active: false,
+ coverage: null,
+ source: 'push',
+ created_at: '2019-01-06T17:48:37.599Z',
+ updated_at: '2019-01-06T17:48:38.371Z',
+ path: '/h5bp/html5-boilerplate/pipelines/26',
+ flags: {
+ latest: true,
+ stuck: false,
+ auto_devops: false,
+ merge_request: false,
+ yaml_errors: false,
+ retryable: true,
+ cancelable: false,
+ failure_reason: false,
+ },
+ details: {
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ duration: null,
+ finished_at: '2019-01-06T17:48:38.370Z',
+ stages: [
+ {
+ name: 'build',
+ title: 'build: passed',
+ groups: [
+ {
+ name: 'build:linux',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/526',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/526/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 526,
+ name: 'build:linux',
+ started: '2019-01-06T08:48:20.236Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/526',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/526/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.806Z',
+ updated_at: '2019-01-06T17:48:37.806Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/526',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/526/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'build:osx',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/527',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/527/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 527,
+ name: 'build:osx',
+ started: '2019-01-06T07:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/527',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/527/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.846Z',
+ updated_at: '2019-01-06T17:48:37.846Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/527',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/527/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#build',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#build',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=build',
+ },
+ {
+ name: 'test',
+ title: 'test: passed with warnings',
+ groups: [
+ {
+ name: 'jenkins',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: null,
+ group: 'success',
+ tooltip: null,
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 546,
+ name: 'jenkins',
+ started: '2019-01-06T11:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/546',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.359Z',
+ updated_at: '2019-01-06T17:48:38.359Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: null,
+ group: 'success',
+ tooltip: null,
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:linux',
+ size: 3,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 528,
+ name: 'rspec:linux 0 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/528',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.885Z',
+ updated_at: '2019-01-06T17:48:37.885Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/528',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 529,
+ name: 'rspec:linux 1 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/529',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/529/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.907Z',
+ updated_at: '2019-01-06T17:48:37.907Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/529',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/529/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 530,
+ name: 'rspec:linux 2 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/530',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/530/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.927Z',
+ updated_at: '2019-01-06T17:48:37.927Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/530',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/530/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:osx',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/535',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/535/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 535,
+ name: 'rspec:osx',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/535',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/535/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.018Z',
+ updated_at: '2019-01-06T17:48:38.018Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/535',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/535/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:windows',
+ size: 3,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 531,
+ name: 'rspec:windows 0 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/531',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/531/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.944Z',
+ updated_at: '2019-01-06T17:48:37.944Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/531',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/531/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 532,
+ name: 'rspec:windows 1 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/532',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/532/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.962Z',
+ updated_at: '2019-01-06T17:48:37.962Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/532',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/532/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 534,
+ name: 'rspec:windows 2 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/534',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/534/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.999Z',
+ updated_at: '2019-01-06T17:48:37.999Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/534',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/534/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'spinach:linux',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/536',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/536/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 536,
+ name: 'spinach:linux',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/536',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/536/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.050Z',
+ updated_at: '2019-01-06T17:48:38.050Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/536',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/536/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'spinach:osx',
+ size: 1,
+ status: {
+ icon: 'status_warning',
+ text: 'failed',
+ label: 'failed (allowed to fail)',
+ group: 'failed-with-warnings',
+ tooltip: 'failed - (unknown failure) (allowed to fail)',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/537',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/537/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 537,
+ name: 'spinach:osx',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/537',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/537/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.069Z',
+ updated_at: '2019-01-06T17:48:38.069Z',
+ status: {
+ icon: 'status_warning',
+ text: 'failed',
+ label: 'failed (allowed to fail)',
+ group: 'failed-with-warnings',
+ tooltip: 'failed - (unknown failure) (allowed to fail)',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/537',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/537/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ callout_message: 'There is an unknown failure, please try again',
+ recoverable: true,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#test',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#test',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=test',
+ },
+ {
+ name: 'security',
+ title: 'security: passed',
+ groups: [
+ {
+ name: 'container_scanning',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/541',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/541/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 541,
+ name: 'container_scanning',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/541',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/541/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.186Z',
+ updated_at: '2019-01-06T17:48:38.186Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/541',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/541/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'dast',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/538',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/538/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 538,
+ name: 'dast',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/538',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/538/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.087Z',
+ updated_at: '2019-01-06T17:48:38.087Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/538',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/538/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'dependency_scanning',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/540',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/540/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 540,
+ name: 'dependency_scanning',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/540',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/540/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.153Z',
+ updated_at: '2019-01-06T17:48:38.153Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/540',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/540/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'sast',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/539',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/539/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 539,
+ name: 'sast',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/539',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/539/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.121Z',
+ updated_at: '2019-01-06T17:48:38.121Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/539',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/539/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#security',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#security',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=security',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: passed',
+ groups: [
+ {
+ name: 'production',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/544',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 544,
+ name: 'production',
+ started: null,
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/544',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.313Z',
+ updated_at: '2019-01-06T17:48:38.313Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/544',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'staging',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/542',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/542/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 542,
+ name: 'staging',
+ started: '2019-01-06T11:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/542',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/542/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.219Z',
+ updated_at: '2019-01-06T17:48:38.219Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/542',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/542/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'stop staging',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/543',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 543,
+ name: 'stop staging',
+ started: null,
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/543',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.283Z',
+ updated_at: '2019-01-06T17:48:38.283Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/543',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#deploy',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#deploy',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=deploy',
+ },
+ {
+ name: 'notify',
+ title: 'notify: passed',
+ groups: [
+ {
+ name: 'slack',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/545',
+ illustration: {
+ image:
+ '/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 545,
+ name: 'slack',
+ started: null,
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/545',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/545/retry',
+ play_path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.341Z',
+ updated_at: '2019-01-06T17:48:38.341Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/545',
+ illustration: {
+ image:
+ '/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#notify',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#notify',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=notify',
+ },
+ ],
+ artifacts: [
+ {
+ name: 'build:linux',
+ expired: null,
+ expire_at: null,
+ path: '/h5bp/html5-boilerplate/-/jobs/526/artifacts/download',
+ browse_path: '/h5bp/html5-boilerplate/-/jobs/526/artifacts/browse',
+ },
+ {
+ name: 'build:osx',
+ expired: null,
+ expire_at: null,
+ path: '/h5bp/html5-boilerplate/-/jobs/527/artifacts/download',
+ browse_path: '/h5bp/html5-boilerplate/-/jobs/527/artifacts/browse',
+ },
+ ],
+ manual_actions: [
+ {
+ name: 'stop staging',
+ path: '/h5bp/html5-boilerplate/-/jobs/543/play',
+ playable: false,
+ scheduled: false,
+ },
+ {
+ name: 'production',
+ path: '/h5bp/html5-boilerplate/-/jobs/544/play',
+ playable: false,
+ scheduled: false,
+ },
+ {
+ name: 'slack',
+ path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ playable: true,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
+ },
+ ref: {
+ name: 'master',
+ path: '/h5bp/html5-boilerplate/commits/master',
+ tag: false,
+ branch: true,
+ merge_request: false,
+ },
+ commit: {
+ id: 'bad98c453eab56d20057f3929989251d45cd1a8b',
+ short_id: 'bad98c45',
+ title: 'remove instances of shrink-to-fit=no (#2103)',
+ created_at: '2018-12-17T20:52:18.000Z',
+ parent_ids: ['49130f6cfe9ff1f749015d735649a2bc6f66cf3a'],
+ message:
+ 'remove instances of shrink-to-fit=no (#2103)\n\ncloses #2102\r\n\r\nPer my findings, the need for it as a default was rectified with the release of iOS 9.3, where the viewport no longer shrunk to accommodate overflow, as was introduced in iOS 9.',
+ author_name: "Scott O'Hara",
+ author_email: 'scottaohara@users.noreply.github.com',
+ authored_date: '2018-12-17T20:52:18.000Z',
+ committer_name: 'Rob Larsen',
+ committer_email: 'rob@drunkenfist.com',
+ committed_date: '2018-12-17T20:52:18.000Z',
+ author: null,
+ author_gravatar_url:
+ 'https://www.gravatar.com/avatar/6d597df7cf998d16cbe00ccac063b31e?s=80\u0026d=identicon',
+ commit_url:
+ 'http://localhost:3001/h5bp/html5-boilerplate/commit/bad98c453eab56d20057f3929989251d45cd1a8b',
+ commit_path: '/h5bp/html5-boilerplate/commit/bad98c453eab56d20057f3929989251d45cd1a8b',
+ },
+ retry_path: '/h5bp/html5-boilerplate/pipelines/26/retry',
+ triggered_by: {
+ id: 4,
+ user: null,
+ active: false,
+ coverage: null,
+ source: 'push',
+ path: '/gitlab-org/gitlab-test/pipelines/4',
+ details: {
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-test/pipelines/4',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ },
+ project: {
+ id: 1,
+ name: 'Gitlab Test',
+ full_path: '/gitlab-org/gitlab-test',
+ full_name: 'Gitlab Org / Gitlab Test',
+ },
+ },
+ triggered: [],
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
+ },
+ },
+ ],
},
],
};
diff --git a/spec/javascripts/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index a4a5d78f906..a4a5d78f906 100644
--- a/spec/javascripts/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/pipelines/graph/stage_column_component_spec.js
new file mode 100644
index 00000000000..88e56eee1d6
--- /dev/null
+++ b/spec/frontend/pipelines/graph/stage_column_component_spec.js
@@ -0,0 +1,136 @@
+import { shallowMount } from '@vue/test-utils';
+
+import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+
+describe('stage column component', () => {
+ const mockJob = {
+ id: 4250,
+ name: 'test',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ details_path: '/root/ci-mock/builds/4250',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/root/ci-mock/builds/4250/retry',
+ method: 'post',
+ },
+ },
+ };
+
+ let wrapper;
+
+ beforeEach(() => {
+ const mockGroups = [];
+ for (let i = 0; i < 3; i += 1) {
+ const mockedJob = Object.assign({}, mockJob);
+ mockedJob.id += i;
+ mockGroups.push(mockedJob);
+ }
+
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ title: 'foo',
+ groups: mockGroups,
+ hasTriggeredBy: false,
+ },
+ });
+ });
+
+ it('should render provided title', () => {
+ expect(
+ wrapper
+ .find('.stage-name')
+ .text()
+ .trim(),
+ ).toBe('foo');
+ });
+
+ it('should render the provided groups', () => {
+ expect(wrapper.findAll('.builds-container > ul > li').length).toBe(
+ wrapper.props('groups').length,
+ );
+ });
+
+ describe('jobId', () => {
+ it('escapes job name', () => {
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ },
+ });
+
+ expect(wrapper.find('.builds-container li').attributes('id')).toBe(
+ 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
+ );
+ });
+ });
+
+ describe('with action', () => {
+ it('renders action button', () => {
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ action: {
+ icon: 'play',
+ title: 'Play all',
+ path: 'action',
+ },
+ },
+ });
+
+ expect(wrapper.find('.js-stage-action').exists()).toBe(true);
+ });
+ });
+
+ describe('without action', () => {
+ it('does not render action button', () => {
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ },
+ });
+
+ expect(wrapper.find('.js-stage-action').exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index 3e46a29f776..f69b849521d 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -1,11 +1,12 @@
import VueRouter from 'vue-router';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlPagination, GlSkeletonLoader, GlSprintf } from '@gitlab/ui';
+import { GlPagination, GlSkeletonLoader, GlSprintf, GlAlert } from '@gitlab/ui';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/list.vue';
import QuickstartDropdown from '~/registry/explorer/components/quickstart_dropdown.vue';
import GroupEmptyState from '~/registry/explorer/components/group_empty_state.vue';
import ProjectEmptyState from '~/registry/explorer/components/project_empty_state.vue';
+import ProjectPolicyAlert from '~/registry/explorer/components/project_policy_alert.vue';
import store from '~/registry/explorer/stores/';
import { SET_MAIN_LOADING } from '~/registry/explorer/stores/mutation_types/';
import {
@@ -35,6 +36,8 @@ describe('List Page', () => {
const findQuickStartDropdown = () => wrapper.find(QuickstartDropdown);
const findProjectEmptyState = () => wrapper.find(ProjectEmptyState);
const findGroupEmptyState = () => wrapper.find(GroupEmptyState);
+ const findProjectPolicyAlert = () => wrapper.find(ProjectPolicyAlert);
+ const findDeleteAlert = () => wrapper.find(GlAlert);
beforeEach(() => {
wrapper = shallowMount(component, {
@@ -57,6 +60,18 @@ describe('List Page', () => {
wrapper.destroy();
});
+ describe('Expiration policy notification', () => {
+ it('shows up on project page', () => {
+ expect(findProjectPolicyAlert().exists()).toBe(true);
+ });
+    it('does not show up on group page', () => {
+ store.dispatch('setInitialState', { isGroupPage: true });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findProjectPolicyAlert().exists()).toBe(false);
+ });
+ });
+ });
+
describe('connection error', () => {
const config = {
characterError: true,
@@ -179,32 +194,38 @@ describe('List Page', () => {
it('should call deleteItem when confirming deletion', () => {
dispatchSpy.mockResolvedValue();
- const itemToDelete = wrapper.vm.images[0];
- wrapper.setData({ itemToDelete });
+ findDeleteBtn().vm.$emit('click');
+ expect(wrapper.vm.itemToDelete).not.toEqual({});
findDeleteModal().vm.$emit('ok');
expect(store.dispatch).toHaveBeenCalledWith(
'requestDeleteImage',
- itemToDelete.destroy_path,
+ wrapper.vm.itemToDelete,
);
});
- it('should show a success toast when delete request is successful', () => {
+ it('should show a success alert when delete request is successful', () => {
dispatchSpy.mockResolvedValue();
+ findDeleteBtn().vm.$emit('click');
+ expect(wrapper.vm.itemToDelete).not.toEqual({});
return wrapper.vm.handleDeleteImage().then(() => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(DELETE_IMAGE_SUCCESS_MESSAGE, {
- type: 'success',
- });
- expect(wrapper.vm.itemToDelete).toEqual({});
+ const alert = findDeleteAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text().replace(/\s\s+/gm, ' ')).toBe(
+ DELETE_IMAGE_SUCCESS_MESSAGE.replace('%{title}', wrapper.vm.itemToDelete.path),
+ );
});
});
- it('should show a error toast when delete request fails', () => {
+ it('should show an error alert when delete request fails', () => {
dispatchSpy.mockRejectedValue();
+ findDeleteBtn().vm.$emit('click');
+ expect(wrapper.vm.itemToDelete).not.toEqual({});
return wrapper.vm.handleDeleteImage().then(() => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(DELETE_IMAGE_ERROR_MESSAGE, {
- type: 'error',
- });
- expect(wrapper.vm.itemToDelete).toEqual({});
+ const alert = findDeleteAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text().replace(/\s\s+/gm, ' ')).toBe(
+ DELETE_IMAGE_ERROR_MESSAGE.replace('%{title}', wrapper.vm.itemToDelete.path),
+ );
});
});
});
diff --git a/spec/frontend/registry/explorer/stores/actions_spec.js b/spec/frontend/registry/explorer/stores/actions_spec.js
index b39c79dd1ab..58f61a0e8c2 100644
--- a/spec/frontend/registry/explorer/stores/actions_spec.js
+++ b/spec/frontend/registry/explorer/stores/actions_spec.js
@@ -279,39 +279,32 @@ describe('Actions RegistryExplorer Store', () => {
});
describe('request delete single image', () => {
- const deletePath = 'delete/path';
+ const image = {
+ destroy_path: 'delete/path',
+ };
+
it('successfully performs the delete request', done => {
- mock.onDelete(deletePath).replyOnce(200);
+ mock.onDelete(image.destroy_path).replyOnce(200);
testAction(
actions.requestDeleteImage,
- deletePath,
- {
- pagination: {},
- },
+ image,
+ {},
[
{ type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.UPDATE_IMAGE, payload: { ...image, deleting: true } },
{ type: types.SET_MAIN_LOADING, payload: false },
],
- [
- {
- type: 'setShowGarbageCollectionTip',
- payload: true,
- },
- {
- type: 'requestImagesList',
- payload: { pagination: {} },
- },
- ],
+ [],
done,
);
});
it('should turn off loading on error', done => {
- mock.onDelete(deletePath).replyOnce(400);
+ mock.onDelete(image.destroy_path).replyOnce(400);
testAction(
actions.requestDeleteImage,
- deletePath,
+ image,
{},
[
{ type: types.SET_MAIN_LOADING, payload: true },
diff --git a/spec/frontend/registry/explorer/stores/mutations_spec.js b/spec/frontend/registry/explorer/stores/mutations_spec.js
index 029fd23f7ce..43b2ba84218 100644
--- a/spec/frontend/registry/explorer/stores/mutations_spec.js
+++ b/spec/frontend/registry/explorer/stores/mutations_spec.js
@@ -28,14 +28,32 @@ describe('Mutations Registry Explorer Store', () => {
describe('SET_IMAGES_LIST_SUCCESS', () => {
it('should set the images list', () => {
- const images = [1, 2, 3];
- const expectedState = { ...mockState, images };
+ const images = [{ name: 'foo' }, { name: 'bar' }];
+ const defaultStatus = { deleting: false, failedDelete: false };
+ const expectedState = {
+ ...mockState,
+ images: [{ name: 'foo', ...defaultStatus }, { name: 'bar', ...defaultStatus }],
+ };
mutations[types.SET_IMAGES_LIST_SUCCESS](mockState, images);
expect(mockState).toEqual(expectedState);
});
});
+ describe('UPDATE_IMAGE', () => {
+ it('should update an image', () => {
+ mockState.images = [{ id: 1, name: 'foo' }, { id: 2, name: 'bar' }];
+ const payload = { id: 1, name: 'baz' };
+ const expectedState = {
+ ...mockState,
+ images: [payload, { id: 2, name: 'bar' }],
+ };
+ mutations[types.UPDATE_IMAGE](mockState, payload);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
describe('SET_TAGS_LIST_SUCCESS', () => {
it('should set the tags list', () => {
const tags = [1, 2, 3];
diff --git a/spec/frontend/repository/router_spec.js b/spec/frontend/repository/router_spec.js
index 6944b23558a..8f3ac53c37a 100644
--- a/spec/frontend/repository/router_spec.js
+++ b/spec/frontend/repository/router_spec.js
@@ -4,15 +4,14 @@ import createRouter from '~/repository/router';
describe('Repository router spec', () => {
it.each`
- path | branch | component | componentName
- ${'/'} | ${'master'} | ${IndexPage} | ${'IndexPage'}
- ${'/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/master/app/assets'} | ${'master'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/feature/test-%23/app/assets'} | ${'feature/test-#'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/123/app/assets'} | ${'master'} | ${null} | ${'null'}
- `('sets component as $componentName for path "$path"', ({ path, component, branch }) => {
- const router = createRouter('', branch);
+ path | component | componentName
+ ${'/'} | ${IndexPage} | ${'IndexPage'}
+ ${'/tree/master'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/master'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/master/app/assets'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/123/app/assets'} | ${null} | ${'null'}
+ `('sets component as $componentName for path "$path"', ({ path, component }) => {
+ const router = createRouter('', 'master');
const componentsForRoute = router.getMatchedComponents(path);
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
new file mode 100644
index 00000000000..c1876066a21
--- /dev/null
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -0,0 +1,74 @@
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from 'axios';
+import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees.vue';
+import Assignees from '~/sidebar/components/assignees/assignees.vue';
+import SidebarMediator from '~/sidebar/sidebar_mediator';
+import SidebarService from '~/sidebar/services/sidebar_service';
+import SidebarStore from '~/sidebar/stores/sidebar_store';
+import Mock from './mock_data';
+
+describe('sidebar assignees', () => {
+ let wrapper;
+ let mediator;
+ let axiosMock;
+
+ const createComponent = () => {
+ wrapper = shallowMount(SidebarAssignees, {
+ propsData: {
+ mediator,
+ field: '',
+ },
+ // Attaching to document is required because this component emits something from the parent element :/
+ attachToDocument: true,
+ });
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ mediator = new SidebarMediator(Mock.mediator);
+
+ jest.spyOn(mediator, 'saveAssignees');
+ jest.spyOn(mediator, 'assignYourself');
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+
+ SidebarService.singleton = null;
+ SidebarStore.singleton = null;
+ SidebarMediator.singleton = null;
+ axiosMock.restore();
+ });
+
+  it('calls the mediator when saving the assignees', () => {
+ expect(mediator.saveAssignees).not.toHaveBeenCalled();
+
+ wrapper.vm.saveAssignees();
+
+ expect(mediator.saveAssignees).toHaveBeenCalled();
+ });
+
+ it('calls the mediator when "assignSelf" method is called', () => {
+ expect(mediator.assignYourself).not.toHaveBeenCalled();
+ expect(mediator.store.assignees.length).toBe(0);
+
+ wrapper.vm.assignSelf();
+
+ expect(mediator.assignYourself).toHaveBeenCalled();
+ expect(mediator.store.assignees.length).toBe(1);
+ });
+
+ it('hides assignees until fetched', () => {
+    expect(wrapper.find(Assignees).exists()).toBe(false);
+
+ wrapper.vm.store.isFetching.assignees = false;
+
+ return wrapper.vm.$nextTick(() => {
+      expect(wrapper.find(Assignees).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/snippet/snippet_edit_spec.js b/spec/frontend/snippet/snippet_edit_spec.js
new file mode 100644
index 00000000000..cfe5062c86b
--- /dev/null
+++ b/spec/frontend/snippet/snippet_edit_spec.js
@@ -0,0 +1,45 @@
+import '~/snippet/snippet_edit';
+import { SnippetEditInit } from '~/snippets';
+import initSnippet from '~/snippet/snippet_bundle';
+
+import { triggerDOMEvent } from 'jest/helpers/dom_events_helper';
+
+jest.mock('~/snippet/snippet_bundle');
+jest.mock('~/snippets');
+
+describe('Snippet edit form initialization', () => {
+ const setFF = flag => {
+ gon.features = { snippetsEditVue: flag };
+ };
+ let features;
+
+ beforeEach(() => {
+ features = gon.features;
+ setFixtures('<div class="snippet-form"></div>');
+ });
+
+ afterEach(() => {
+ gon.features = features;
+ });
+
+ it.each`
+ name | flag | isVue
+ ${'Regular'} | ${false} | ${false}
+ ${'Vue'} | ${true} | ${true}
+ `('correctly initializes $name Snippet Edit form', ({ flag, isVue }) => {
+ initSnippet.mockClear();
+ SnippetEditInit.mockClear();
+
+ setFF(flag);
+
+ triggerDOMEvent('DOMContentLoaded');
+
+ if (isVue) {
+ expect(initSnippet).not.toHaveBeenCalled();
+ expect(SnippetEditInit).toHaveBeenCalled();
+ } else {
+ expect(initSnippet).toHaveBeenCalled();
+ expect(SnippetEditInit).not.toHaveBeenCalled();
+ }
+ });
+});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 3c3f9764f64..334ceaa064f 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -39,7 +39,6 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
qa-description-textarea"
data-supports-quick-actions="false"
dir="auto"
- id="snippet-description"
placeholder="Write a comment or drag your files here…"
/>
</markdown-field-stub>
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
new file mode 100644
index 00000000000..21a4ccf5a74
--- /dev/null
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -0,0 +1,279 @@
+import { shallowMount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+
+import { GlLoadingIcon } from '@gitlab/ui';
+import { joinPaths, redirectTo } from '~/lib/utils/url_utility';
+
+import SnippetEditApp from '~/snippets/components/edit.vue';
+import SnippetDescriptionEdit from '~/snippets/components/snippet_description_edit.vue';
+import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
+import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
+import TitleField from '~/vue_shared/components/form/title.vue';
+import FormFooterActions from '~/vue_shared/components/form/form_footer_actions.vue';
+
+import UpdateSnippetMutation from '~/snippets/mutations/updateSnippet.mutation.graphql';
+import CreateSnippetMutation from '~/snippets/mutations/createSnippet.mutation.graphql';
+
+import AxiosMockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { ApolloMutation } from 'vue-apollo';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ getBaseURL: jest.fn().mockReturnValue('foo/'),
+ redirectTo: jest.fn().mockName('redirectTo'),
+ joinPaths: jest
+ .fn()
+ .mockName('joinPaths')
+ .mockReturnValue('contentApiURL'),
+}));
+
+let flashSpy;
+
+const contentMock = 'Foo Bar';
+const rawPathMock = '/foo/bar';
+const rawProjectPathMock = '/project/path';
+const newlyEditedSnippetUrl = 'http://foo.bar';
+const apiError = { message: 'Ufff' };
+
+const defaultProps = {
+ snippetGid: 'gid://gitlab/PersonalSnippet/42',
+ markdownPreviewPath: 'http://preview.foo.bar',
+ markdownDocsPath: 'http://docs.foo.bar',
+};
+
+describe('Snippet Edit app', () => {
+ let wrapper;
+ let axiosMock;
+
+ const resolveMutate = jest.fn().mockResolvedValue({
+ data: {
+ updateSnippet: {
+ errors: [],
+ snippet: {
+ webUrl: newlyEditedSnippetUrl,
+ },
+ },
+ },
+ });
+
+ const rejectMutation = jest.fn().mockRejectedValue(apiError);
+
+ const mutationTypes = {
+ RESOLVE: resolveMutate,
+ REJECT: rejectMutation,
+ };
+
+ function createComponent({
+ props = defaultProps,
+ data = {},
+ loading = false,
+ mutationRes = mutationTypes.RESOLVE,
+ } = {}) {
+ const $apollo = {
+ queries: {
+ snippet: {
+ loading,
+ },
+ },
+ mutate: mutationRes,
+ };
+
+ wrapper = shallowMount(SnippetEditApp, {
+ mocks: { $apollo },
+ stubs: {
+ FormFooterActions,
+ ApolloMutation,
+ },
+ propsData: {
+ ...props,
+ },
+ data() {
+ return data;
+ },
+ });
+
+ flashSpy = jest.spyOn(wrapper.vm, 'flashAPIFailure');
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findSubmitButton = () => wrapper.find('[type=submit]');
+
+ describe('rendering', () => {
+ it('renders loader while the query is in flight', () => {
+ createComponent({ loading: true });
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('renders all required components', () => {
+ createComponent();
+
+ expect(wrapper.contains(TitleField)).toBe(true);
+ expect(wrapper.contains(SnippetDescriptionEdit)).toBe(true);
+ expect(wrapper.contains(SnippetBlobEdit)).toBe(true);
+ expect(wrapper.contains(SnippetVisibilityEdit)).toBe(true);
+ expect(wrapper.contains(FormFooterActions)).toBe(true);
+ });
+
+ it('does not fail if there is no snippet yet (new snippet creation)', () => {
+ const snippetGid = '';
+ createComponent({
+ props: {
+ ...defaultProps,
+ snippetGid,
+ },
+ });
+
+ expect(wrapper.props('snippetGid')).toBe(snippetGid);
+ });
+
+ it.each`
+ title | content | expectation
+ ${''} | ${''} | ${true}
+ ${'foo'} | ${''} | ${true}
+ ${''} | ${'foo'} | ${true}
+ ${'foo'} | ${'bar'} | ${false}
+ `(
+ 'disables submit button unless both title and content are present',
+ ({ title, content, expectation }) => {
+ createComponent({
+ data: {
+ snippet: { title },
+ content,
+ },
+ });
+ const isBtnDisabled = Boolean(findSubmitButton().attributes('disabled'));
+ expect(isBtnDisabled).toBe(expectation);
+ },
+ );
+ });
+
+ describe('functionality', () => {
+ describe('handling of the data from GraphQL response', () => {
+ const snippet = {
+ blob: {
+ rawPath: rawPathMock,
+ },
+ };
+ const getResSchema = newSnippet => {
+ return {
+ data: {
+ snippets: {
+ edges: newSnippet ? [] : [snippet],
+ },
+ },
+ };
+ };
+
+ const bootstrapForExistingSnippet = resp => {
+ createComponent({
+ data: {
+ snippet,
+ },
+ });
+
+ if (resp === 500) {
+ axiosMock.onGet('contentApiURL').reply(500);
+ } else {
+ axiosMock.onGet('contentApiURL').reply(200, contentMock);
+ }
+ wrapper.vm.onSnippetFetch(getResSchema());
+ };
+
+ const bootstrapForNewSnippet = () => {
+ createComponent();
+ wrapper.vm.onSnippetFetch(getResSchema(true));
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it('fetches blob content with the additional query', () => {
+ bootstrapForExistingSnippet();
+
+ return waitForPromises().then(() => {
+ expect(joinPaths).toHaveBeenCalledWith('foo/', rawPathMock);
+ expect(wrapper.vm.newSnippet).toBe(false);
+ expect(wrapper.vm.content).toBe(contentMock);
+ });
+ });
+
+ it('flashes the error message if fetching content fails', () => {
+ bootstrapForExistingSnippet(500);
+
+ return waitForPromises().then(() => {
+ expect(flashSpy).toHaveBeenCalled();
+ expect(wrapper.vm.content).toBe('');
+ });
+ });
+
+ it('does not fetch content for new snippet', () => {
+ bootstrapForNewSnippet();
+
+ return waitForPromises().then(() => {
+        // we keep using waitForPromises so the assertions run only after all promises settle and failures are reported
+ expect(wrapper.vm.newSnippet).toBe(true);
+ expect(wrapper.vm.content).toBe('');
+ expect(joinPaths).not.toHaveBeenCalled();
+ expect(wrapper.vm.snippet).toEqual(wrapper.vm.$options.newSnippetSchema);
+ });
+ });
+ });
+
+ describe('form submission handling', () => {
+ it.each`
+ newSnippet | projectPath | mutation | mutationName
+ ${true} | ${rawProjectPathMock} | ${CreateSnippetMutation} | ${'CreateSnippetMutation with projectPath'}
+ ${true} | ${''} | ${CreateSnippetMutation} | ${'CreateSnippetMutation without projectPath'}
+ ${false} | ${rawProjectPathMock} | ${UpdateSnippetMutation} | ${'UpdateSnippetMutation with projectPath'}
+ ${false} | ${''} | ${UpdateSnippetMutation} | ${'UpdateSnippetMutation without projectPath'}
+ `('should submit $mutationName correctly', ({ newSnippet, projectPath, mutation }) => {
+ createComponent({
+ data: {
+ newSnippet,
+ },
+ props: {
+ ...defaultProps,
+ projectPath,
+ },
+ });
+
+ const mutationPayload = {
+ mutation,
+ variables: {
+ input: newSnippet ? expect.objectContaining({ projectPath }) : expect.any(Object),
+ },
+ };
+
+ wrapper.vm.handleFormSubmit();
+ expect(resolveMutate).toHaveBeenCalledWith(mutationPayload);
+ });
+
+ it('redirects to snippet view on successful mutation', () => {
+ createComponent();
+ wrapper.vm.handleFormSubmit();
+ return waitForPromises().then(() => {
+ expect(redirectTo).toHaveBeenCalledWith(newlyEditedSnippetUrl);
+ });
+ });
+
+ it('flashes an error if mutation failed', () => {
+ createComponent({
+ mutationRes: mutationTypes.REJECT,
+ });
+ wrapper.vm.handleFormSubmit();
+ return waitForPromises().then(() => {
+ expect(redirectTo).not.toHaveBeenCalled();
+ expect(flashSpy).toHaveBeenCalledWith(apiError);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 1b67c08e5a4..16a66c70d6a 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -1,7 +1,7 @@
import SnippetHeader from '~/snippets/components/snippet_header.vue';
import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
import { ApolloMutation } from 'vue-apollo';
-import { GlNewButton, GlModal } from '@gitlab/ui';
+import { GlButton, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
describe('Snippet header component', () => {
@@ -89,7 +89,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(0);
+ expect(wrapper.findAll(GlButton).length).toEqual(0);
createComponent({
permissions: {
@@ -97,7 +97,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(1);
+ expect(wrapper.findAll(GlButton).length).toEqual(1);
createComponent({
permissions: {
@@ -105,7 +105,7 @@ describe('Snippet header component', () => {
updateSnippet: true,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(2);
+ expect(wrapper.findAll(GlButton).length).toEqual(2);
createComponent({
permissions: {
@@ -117,7 +117,7 @@ describe('Snippet header component', () => {
canCreateSnippet: true,
});
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.findAll(GlNewButton).length).toEqual(3);
+ expect(wrapper.findAll(GlButton).length).toEqual(3);
});
});
diff --git a/spec/frontend/static_site_editor/components/invalid_content_message_spec.js b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
new file mode 100644
index 00000000000..7e699e9451c
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
@@ -0,0 +1,23 @@
+import { shallowMount } from '@vue/test-utils';
+
+import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
+
+describe('~/static_site_editor/components/invalid_content_message.vue', () => {
+ let wrapper;
+ const findDocumentationButton = () => wrapper.find({ ref: 'documentationButton' });
+ const documentationUrl =
+ 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman';
+
+ beforeEach(() => {
+ wrapper = shallowMount(InvalidContentMessage);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the configuration button link', () => {
+ expect(findDocumentationButton().exists()).toBe(true);
+ expect(findDocumentationButton().attributes('href')).toBe(documentationUrl);
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
index f00fc38430f..82eb12d4c4d 100644
--- a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
+++ b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlNewButton, GlLoadingIcon } from '@gitlab/ui';
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
@@ -18,7 +18,7 @@ describe('Static Site Editor Toolbar', () => {
};
const findReturnUrlLink = () => wrapper.find({ ref: 'returnUrlLink' });
- const findSaveChangesButton = () => wrapper.find(GlNewButton);
+ const findSaveChangesButton = () => wrapper.find(GlButton);
const findLoadingIndicator = () => wrapper.find(GlLoadingIcon);
beforeEach(() => {
diff --git a/spec/frontend/static_site_editor/components/saved_changes_message_spec.js b/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
index 76ac7de5c32..659e9be59d2 100644
--- a/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
+++ b/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
@@ -1,22 +1,17 @@
import { shallowMount } from '@vue/test-utils';
+
import SavedChangesMessage from '~/static_site_editor/components/saved_changes_message.vue';
+import { returnUrl, savedContentMeta } from '../mock_data';
+
describe('~/static_site_editor/components/saved_changes_message.vue', () => {
let wrapper;
+ const { branch, commit, mergeRequest } = savedContentMeta;
const props = {
- branch: {
- label: '123-the-branch',
- url: 'https://gitlab.com/gitlab-org/gitlab/-/tree/123-the-branch',
- },
- commit: {
- label: 'a123',
- url: 'https://gitlab.com/gitlab-org/gitlab/-/commit/a123',
- },
- mergeRequest: {
- label: '123',
- url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123',
- },
- returnUrl: 'https://www.the-static-site.com/post',
+ branch,
+ commit,
+ mergeRequest,
+ returnUrl,
};
const findReturnToSiteButton = () => wrapper.find({ ref: 'returnToSiteButton' });
const findMergeRequestButton = () => wrapper.find({ ref: 'mergeRequestButton' });
@@ -51,11 +46,14 @@ describe('~/static_site_editor/components/saved_changes_message.vue', () => {
${'branch'} | ${findBranchLink} | ${props.branch}
${'commit'} | ${findCommitLink} | ${props.commit}
${'merge request'} | ${findMergeRequestLink} | ${props.mergeRequest}
- `('renders $desc link', ({ findEl, prop }) => {
+ `('renders $desc link', ({ desc, findEl, prop }) => {
const el = findEl();
expect(el.exists()).toBe(true);
- expect(el.attributes('href')).toBe(prop.url);
expect(el.text()).toBe(prop.label);
+
+ if (desc !== 'branch') {
+ expect(el.attributes('href')).toBe(prop.url);
+ }
});
});
diff --git a/spec/frontend/static_site_editor/components/static_site_editor_spec.js b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
index d427df9bd4b..5d4e3758557 100644
--- a/spec/frontend/static_site_editor/components/static_site_editor_spec.js
+++ b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
@@ -1,6 +1,5 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-
import { GlSkeletonLoader } from '@gitlab/ui';
import createState from '~/static_site_editor/store/state';
@@ -8,9 +7,18 @@ import createState from '~/static_site_editor/store/state';
import StaticSiteEditor from '~/static_site_editor/components/static_site_editor.vue';
import EditArea from '~/static_site_editor/components/edit_area.vue';
import EditHeader from '~/static_site_editor/components/edit_header.vue';
+import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
+import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
+import SavedChangesMessage from '~/static_site_editor/components/saved_changes_message.vue';
-import { sourceContent, sourceContentTitle } from '../mock_data';
+import {
+ returnUrl,
+ sourceContent,
+ sourceContentTitle,
+ savedContentMeta,
+ submitChangesError,
+} from '../mock_data';
const localVue = createLocalVue();
@@ -22,14 +30,19 @@ describe('StaticSiteEditor', () => {
let loadContentActionMock;
let setContentActionMock;
let submitChangesActionMock;
+ let dismissSubmitChangesErrorActionMock;
const buildStore = ({ initialState, getters } = {}) => {
loadContentActionMock = jest.fn();
setContentActionMock = jest.fn();
submitChangesActionMock = jest.fn();
+ dismissSubmitChangesErrorActionMock = jest.fn();
store = new Vuex.Store({
- state: createState(initialState),
+ state: createState({
+ isSupportedContent: true,
+ ...initialState,
+ }),
getters: {
contentChanged: () => false,
...getters,
@@ -38,6 +51,7 @@ describe('StaticSiteEditor', () => {
loadContent: loadContentActionMock,
setContent: setContentActionMock,
submitChanges: submitChangesActionMock,
+ dismissSubmitChangesError: dismissSubmitChangesErrorActionMock,
},
});
};
@@ -62,8 +76,11 @@ describe('StaticSiteEditor', () => {
const findEditArea = () => wrapper.find(EditArea);
const findEditHeader = () => wrapper.find(EditHeader);
+ const findInvalidContentMessage = () => wrapper.find(InvalidContentMessage);
const findPublishToolbar = () => wrapper.find(PublishToolbar);
const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
+ const findSubmitChangesError = () => wrapper.find(SubmitChangesError);
+ const findSavedChangesMessage = () => wrapper.find(SavedChangesMessage);
beforeEach(() => {
buildStore();
@@ -74,6 +91,17 @@ describe('StaticSiteEditor', () => {
wrapper.destroy();
});
+ it('renders the saved changes message when changes are submitted successfully', () => {
+ buildStore({ initialState: { returnUrl, savedContentMeta } });
+ buildWrapper();
+
+ expect(findSavedChangesMessage().exists()).toBe(true);
+ expect(findSavedChangesMessage().props()).toEqual({
+ returnUrl,
+ ...savedContentMeta,
+ });
+ });
+
describe('when content is not loaded', () => {
it('does not render edit area', () => {
expect(findEditArea().exists()).toBe(false);
@@ -86,6 +114,10 @@ describe('StaticSiteEditor', () => {
it('does not render toolbar', () => {
expect(findPublishToolbar().exists()).toBe(false);
});
+
+ it('does not render saved changes message', () => {
+ expect(findSavedChangesMessage().exists()).toBe(false);
+ });
});
describe('when content is loaded', () => {
@@ -140,6 +172,13 @@ describe('StaticSiteEditor', () => {
expect(findSkeletonLoader().exists()).toBe(true);
});
+ it('does not display submit changes error when an error does not exist', () => {
+ buildContentLoadedStore();
+ buildWrapper();
+
+ expect(findSubmitChangesError().exists()).toBe(false);
+ });
+
it('sets toolbar as saving when saving changes', () => {
buildContentLoadedStore({
initialState: {
@@ -151,6 +190,40 @@ describe('StaticSiteEditor', () => {
expect(findPublishToolbar().props('savingChanges')).toBe(true);
});
+ it('displays invalid content message when content is not supported', () => {
+ buildStore({ initialState: { isSupportedContent: false } });
+ buildWrapper();
+
+ expect(findInvalidContentMessage().exists()).toBe(true);
+ });
+
+ describe('when submitting changes fail', () => {
+ beforeEach(() => {
+ buildContentLoadedStore({
+ initialState: {
+ submitChangesError,
+ },
+ });
+ buildWrapper();
+ });
+
+ it('displays submit changes error message', () => {
+ expect(findSubmitChangesError().exists()).toBe(true);
+ });
+
+ it('dispatches submitChanges action when error message emits retry event', () => {
+ findSubmitChangesError().vm.$emit('retry');
+
+ expect(submitChangesActionMock).toHaveBeenCalled();
+ });
+
+ it('dispatches dismissSubmitChangesError action when error message emits dismiss event', () => {
+ findSubmitChangesError().vm.$emit('dismiss');
+
+ expect(dismissSubmitChangesErrorActionMock).toHaveBeenCalled();
+ });
+ });
+
it('dispatches load content action', () => {
expect(loadContentActionMock).toHaveBeenCalled();
});
diff --git a/spec/frontend/static_site_editor/components/submit_changes_error_spec.js b/spec/frontend/static_site_editor/components/submit_changes_error_spec.js
new file mode 100644
index 00000000000..7af3014b338
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/submit_changes_error_spec.js
@@ -0,0 +1,48 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlAlert } from '@gitlab/ui';
+
+import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
+
+import { submitChangesError as error } from '../mock_data';
+
+describe('Submit Changes Error', () => {
+ let wrapper;
+
+ const buildWrapper = (propsData = {}) => {
+ wrapper = shallowMount(SubmitChangesError, {
+ propsData: {
+ ...propsData,
+ },
+ stubs: {
+ GlAlert,
+ },
+ });
+ };
+
+ const findRetryButton = () => wrapper.find(GlButton);
+ const findAlert = () => wrapper.find(GlAlert);
+
+ beforeEach(() => {
+ buildWrapper({ error });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders error message', () => {
+ expect(findAlert().text()).toContain(error);
+ });
+
+ it('emits dismiss event when alert emits dismiss event', () => {
+ findAlert().vm.$emit('dismiss');
+
+ expect(wrapper.emitted('dismiss')).toHaveLength(1);
+ });
+
+ it('emits retry event when retry button is clicked', () => {
+ findRetryButton().vm.$emit('click');
+
+ expect(wrapper.emitted('retry')).toHaveLength(1);
+ });
+});
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index 345ae0ce6f6..962047e6dd2 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -21,10 +21,10 @@ export const sourcePath = 'foobar.md.html';
export const savedContentMeta = {
branch: {
label: 'foobar',
- url: 'foobar/-/tree/foorbar',
+ url: 'foobar/-/tree/foobar',
},
commit: {
- label: 'c1461b08 ',
+ label: 'c1461b08',
url: 'foobar/-/c1461b08',
},
mergeRequest: {
diff --git a/spec/frontend/static_site_editor/store/actions_spec.js b/spec/frontend/static_site_editor/store/actions_spec.js
index a9c039517b7..6b0b77f59b7 100644
--- a/spec/frontend/static_site_editor/store/actions_spec.js
+++ b/spec/frontend/static_site_editor/store/actions_spec.js
@@ -124,24 +124,29 @@ describe('Static Site Editor Store actions', () => {
});
describe('on error', () => {
+ const error = new Error(submitChangesError);
const expectedMutations = [
{ type: mutationTypes.SUBMIT_CHANGES },
- { type: mutationTypes.SUBMIT_CHANGES_ERROR },
+ { type: mutationTypes.SUBMIT_CHANGES_ERROR, payload: error.message },
];
beforeEach(() => {
- submitContentChanges.mockRejectedValueOnce(new Error(submitChangesError));
+ submitContentChanges.mockRejectedValueOnce(error);
});
it('dispatches receiveContentError', () => {
testAction(actions.submitChanges, null, state, expectedMutations);
});
+ });
+ });
- it('displays flash communicating error', () => {
- return testAction(actions.submitChanges, null, state, expectedMutations).then(() => {
- expect(createFlash).toHaveBeenCalledWith(submitChangesError);
- });
- });
+ describe('dismissSubmitChangesError', () => {
+ it('commits dismissSubmitChangesError', () => {
+ testAction(actions.dismissSubmitChangesError, null, state, [
+ {
+ type: mutationTypes.DISMISS_SUBMIT_CHANGES_ERROR,
+ },
+ ]);
});
});
});
diff --git a/spec/frontend/static_site_editor/store/mutations_spec.js b/spec/frontend/static_site_editor/store/mutations_spec.js
index 0b213c11a04..2441f317d90 100644
--- a/spec/frontend/static_site_editor/store/mutations_spec.js
+++ b/spec/frontend/static_site_editor/store/mutations_spec.js
@@ -5,6 +5,7 @@ import {
sourceContentTitle as title,
sourceContent as content,
savedContentMeta,
+ submitChangesError,
} from '../mock_data';
describe('Static Site Editor Store mutations', () => {
@@ -16,19 +17,21 @@ describe('Static Site Editor Store mutations', () => {
});
it.each`
- mutation | stateProperty | payload | expectedValue
- ${types.LOAD_CONTENT} | ${'isLoadingContent'} | ${undefined} | ${true}
- ${types.RECEIVE_CONTENT_SUCCESS} | ${'isLoadingContent'} | ${contentLoadedPayload} | ${false}
- ${types.RECEIVE_CONTENT_SUCCESS} | ${'isContentLoaded'} | ${contentLoadedPayload} | ${true}
- ${types.RECEIVE_CONTENT_SUCCESS} | ${'title'} | ${contentLoadedPayload} | ${title}
- ${types.RECEIVE_CONTENT_SUCCESS} | ${'content'} | ${contentLoadedPayload} | ${content}
- ${types.RECEIVE_CONTENT_SUCCESS} | ${'originalContent'} | ${contentLoadedPayload} | ${content}
- ${types.RECEIVE_CONTENT_ERROR} | ${'isLoadingContent'} | ${undefined} | ${false}
- ${types.SET_CONTENT} | ${'content'} | ${content} | ${content}
- ${types.SUBMIT_CHANGES} | ${'isSavingChanges'} | ${undefined} | ${true}
- ${types.SUBMIT_CHANGES_SUCCESS} | ${'savedContentMeta'} | ${savedContentMeta} | ${savedContentMeta}
- ${types.SUBMIT_CHANGES_SUCCESS} | ${'isSavingChanges'} | ${savedContentMeta} | ${false}
- ${types.SUBMIT_CHANGES_ERROR} | ${'isSavingChanges'} | ${undefined} | ${false}
+ mutation | stateProperty | payload | expectedValue
+ ${types.LOAD_CONTENT} | ${'isLoadingContent'} | ${undefined} | ${true}
+ ${types.RECEIVE_CONTENT_SUCCESS} | ${'isLoadingContent'} | ${contentLoadedPayload} | ${false}
+ ${types.RECEIVE_CONTENT_SUCCESS} | ${'isContentLoaded'} | ${contentLoadedPayload} | ${true}
+ ${types.RECEIVE_CONTENT_SUCCESS} | ${'title'} | ${contentLoadedPayload} | ${title}
+ ${types.RECEIVE_CONTENT_SUCCESS} | ${'content'} | ${contentLoadedPayload} | ${content}
+ ${types.RECEIVE_CONTENT_SUCCESS} | ${'originalContent'} | ${contentLoadedPayload} | ${content}
+ ${types.RECEIVE_CONTENT_ERROR} | ${'isLoadingContent'} | ${undefined} | ${false}
+ ${types.SET_CONTENT} | ${'content'} | ${content} | ${content}
+ ${types.SUBMIT_CHANGES} | ${'isSavingChanges'} | ${undefined} | ${true}
+ ${types.SUBMIT_CHANGES_SUCCESS} | ${'savedContentMeta'} | ${savedContentMeta} | ${savedContentMeta}
+ ${types.SUBMIT_CHANGES_SUCCESS} | ${'isSavingChanges'} | ${savedContentMeta} | ${false}
+ ${types.SUBMIT_CHANGES_ERROR} | ${'isSavingChanges'} | ${undefined} | ${false}
+ ${types.SUBMIT_CHANGES_ERROR} | ${'submitChangesError'} | ${submitChangesError} | ${submitChangesError}
+ ${types.DISMISS_SUBMIT_CHANGES_ERROR} | ${'submitChangesError'} | ${undefined} | ${''}
`(
'$mutation sets $stateProperty to $expectedValue',
({ mutation, stateProperty, payload, expectedValue }) => {
diff --git a/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
new file mode 100644
index 00000000000..df4b30f1cb8
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
@@ -0,0 +1,287 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
+<div
+ class="awards js-awards-block"
+>
+ <button
+ class="btn award-control"
+ data-boundary="viewport"
+ data-original-title="Ada, Leonardo, and Marie"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/thumbsup-59ec2457ab33e8897261d01a495f6cf5c668d0004807dc541c3b1be5294b1e61.png"
+ data-name="thumbsup"
+ data-unicode-version="6.0"
+ title="thumbs up sign"
+ >
+
+ 👍
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 3
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You, Ada, and Marie"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/thumbsdown-5954334e2dae5357312b3d629f10a496c728029e02216f8c8b887f9b51561c61.png"
+ data-name="thumbsdown"
+ data-unicode-version="6.0"
+ title="thumbs down sign"
+ >
+
+ 👎
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 3
+ </span>
+ </button>
+ <button
+ class="btn award-control"
+ data-boundary="viewport"
+ data-original-title="Ada and Jane"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/smile-14905c372d5bf7719bd727c9efae31a03291acec79801652a23710c6848c5d14.png"
+ data-name="smile"
+ data-unicode-version="6.0"
+ title="smiling face with open mouth and smiling eyes"
+ >
+
+ 😄
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 2
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You, Ada, Jane, and Leonardo"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/ok_hand-d63002dce3cc3655b67b8765b7c28d370edba0e3758b2329b60e0e61c4d8e78d.png"
+ data-name="ok_hand"
+ data-unicode-version="6.0"
+ title="ok hand sign"
+ >
+
+ 👌
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 4
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/cactus-2c5c4c35f26c7046fdc002b337e0d939729b33a26980e675950f9934c91e40fd.png"
+ data-name="cactus"
+ data-unicode-version="6.0"
+ title="cactus"
+ >
+
+ 🌵
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 1
+ </span>
+ </button>
+ <button
+ class="btn award-control"
+ data-boundary="viewport"
+ data-original-title="Marie"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/a-bddbb39e8a1d35d42b7c08e7d47f63988cb4d8614b79f74e70b9c67c221896cc.png"
+ data-name="a"
+ data-unicode-version="6.0"
+ title="negative squared latin capital letter a"
+ >
+
+ 🅰
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 1
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/b-722f9db9442e7c0fc0d0ac0f5291fbf47c6a0ac4d8abd42e97957da705fb82bf.png"
+ data-name="b"
+ data-unicode-version="6.0"
+ title="negative squared latin capital letter b"
+ >
+
+ 🅱
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 1
+ </span>
+ </button>
+
+ <div
+ class="award-menu-holder"
+ >
+ <button
+ aria-label="Add reaction"
+ class="award-control btn js-add-award js-test-add-button-class"
+ data-boundary="viewport"
+ data-original-title="Add reaction"
+ title=""
+ type="button"
+ >
+ <span
+ class="award-control-icon award-control-icon-neutral"
+ >
+ <gl-icon-stub
+ aria-hidden="true"
+ name="slight-smile"
+ size="16"
+ />
+ </span>
+
+ <span
+ class="award-control-icon award-control-icon-positive"
+ >
+ <gl-icon-stub
+ aria-hidden="true"
+ name="smiley"
+ size="16"
+ />
+ </span>
+
+ <span
+ class="award-control-icon award-control-icon-super-positive"
+ >
+ <gl-icon-stub
+ aria-hidden="true"
+ name="smiley"
+ size="16"
+ />
+ </span>
+
+ <i
+ aria-hidden="true"
+ class="fa fa-spinner fa-spin award-control-icon award-control-icon-loading"
+ />
+ </button>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index d837c793784..4cd03a690e9 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -42,7 +42,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-input-group-append-stub
tag="div"
>
- <gl-new-button-stub
+ <gl-button-stub
category="tertiary"
data-clipboard-text="ssh://foo.bar"
icon=""
@@ -55,7 +55,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
size="16"
title="Copy URL"
/>
- </gl-new-button-stub>
+ </gl-button-stub>
</b-input-group-append-stub>
</b-input-group-stub>
</div>
@@ -92,7 +92,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-input-group-append-stub
tag="div"
>
- <gl-new-button-stub
+ <gl-button-stub
category="tertiary"
data-clipboard-text="http://foo.bar"
icon=""
@@ -105,7 +105,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
size="16"
title="Copy URL"
/>
- </gl-new-button-stub>
+ </gl-button-stub>
</b-input-group-append-stub>
</b-input-group-stub>
</div>
diff --git a/spec/frontend/vue_shared/components/awards_list_spec.js b/spec/frontend/vue_shared/components/awards_list_spec.js
new file mode 100644
index 00000000000..bb3e60ab9e2
--- /dev/null
+++ b/spec/frontend/vue_shared/components/awards_list_spec.js
@@ -0,0 +1,213 @@
+import { shallowMount } from '@vue/test-utils';
+import AwardsList from '~/vue_shared/components/awards_list.vue';
+
+const createUser = (id, name) => ({ id, name });
+const createAward = (name, user) => ({ name, user });
+
+const USERS = {
+ root: createUser(1, 'Root'),
+ ada: createUser(2, 'Ada'),
+ marie: createUser(3, 'Marie'),
+ jane: createUser(4, 'Jane'),
+ leonardo: createUser(5, 'Leonardo'),
+};
+
+const EMOJI_SMILE = 'smile';
+const EMOJI_OK = 'ok_hand';
+const EMOJI_THUMBSUP = 'thumbsup';
+const EMOJI_THUMBSDOWN = 'thumbsdown';
+const EMOJI_A = 'a';
+const EMOJI_B = 'b';
+const EMOJI_CACTUS = 'cactus';
+const EMOJI_100 = '100';
+
+const TEST_AWARDS = [
+ createAward(EMOJI_SMILE, USERS.ada),
+ createAward(EMOJI_OK, USERS.ada),
+ createAward(EMOJI_THUMBSUP, USERS.ada),
+ createAward(EMOJI_THUMBSDOWN, USERS.ada),
+ createAward(EMOJI_SMILE, USERS.jane),
+ createAward(EMOJI_OK, USERS.jane),
+ createAward(EMOJI_OK, USERS.leonardo),
+ createAward(EMOJI_THUMBSUP, USERS.leonardo),
+ createAward(EMOJI_THUMBSUP, USERS.marie),
+ createAward(EMOJI_THUMBSDOWN, USERS.marie),
+ createAward(EMOJI_THUMBSDOWN, USERS.root),
+ createAward(EMOJI_OK, USERS.root),
+ // Test that emoji list preserves order of occurrence, not alphabetical order
+ createAward(EMOJI_CACTUS, USERS.root),
+ createAward(EMOJI_A, USERS.marie),
+ createAward(EMOJI_B, USERS.root),
+];
+const TEST_ADD_BUTTON_CLASS = 'js-test-add-button-class';
+
+describe('vue_shared/components/awards_list', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const createComponent = (props = {}) => {
+ if (wrapper) {
+ throw new Error('There should only be one wrapper created per test');
+ }
+
+ wrapper = shallowMount(AwardsList, { propsData: props });
+ };
+ const matchingEmojiTag = name => expect.stringMatching(`gl-emoji data-name="${name}"`);
+ const findAwardButtons = () => wrapper.findAll('[data-testid="award-button"]');
+ const findAwardsData = () =>
+ findAwardButtons().wrappers.map(x => {
+ return {
+ classes: x.classes(),
+ title: x.attributes('data-original-title'),
+ html: x.find('[data-testid="award-html"]').element.innerHTML,
+ count: Number(x.find('.js-counter').text()),
+ };
+ });
+ const findAddAwardButton = () => wrapper.find('.js-add-award');
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: TEST_AWARDS,
+ canAwardEmoji: true,
+ currentUserId: USERS.root.id,
+ addButtonClass: TEST_ADD_BUTTON_CLASS,
+ });
+ });
+
+ it('matches snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('shows awards in correct order', () => {
+ expect(findAwardsData()).toEqual([
+ {
+ classes: ['btn', 'award-control'],
+ count: 3,
+ html: matchingEmojiTag(EMOJI_THUMBSUP),
+ title: 'Ada, Leonardo, and Marie',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 3,
+ html: matchingEmojiTag(EMOJI_THUMBSDOWN),
+ title: 'You, Ada, and Marie',
+ },
+ {
+ classes: ['btn', 'award-control'],
+ count: 2,
+ html: matchingEmojiTag(EMOJI_SMILE),
+ title: 'Ada and Jane',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 4,
+ html: matchingEmojiTag(EMOJI_OK),
+ title: 'You, Ada, Jane, and Leonardo',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 1,
+ html: matchingEmojiTag(EMOJI_CACTUS),
+ title: 'You',
+ },
+ {
+ classes: ['btn', 'award-control'],
+ count: 1,
+ html: matchingEmojiTag(EMOJI_A),
+ title: 'Marie',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 1,
+ html: matchingEmojiTag(EMOJI_B),
+ title: 'You',
+ },
+ ]);
+ });
+
+ it('with award clicked, it emits award', () => {
+ expect(wrapper.emitted().award).toBeUndefined();
+
+ findAwardButtons()
+ .at(2)
+ .trigger('click');
+
+ expect(wrapper.emitted().award).toEqual([[EMOJI_SMILE]]);
+ });
+
+ it('shows add award button', () => {
+ const btn = findAddAwardButton();
+
+ expect(btn.exists()).toBe(true);
+ expect(btn.classes(TEST_ADD_BUTTON_CLASS)).toBe(true);
+ });
+ });
+
+ describe('with numeric award', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: [createAward(EMOJI_100, USERS.ada)],
+ canAwardEmoji: true,
+ currentUserId: USERS.root.id,
+ });
+ });
+
+ it('when clicked, it emits award as number', () => {
+ expect(wrapper.emitted().award).toBeUndefined();
+
+ findAwardButtons()
+ .at(0)
+ .trigger('click');
+
+ expect(wrapper.emitted().award).toEqual([[Number(EMOJI_100)]]);
+ });
+ });
+
+ describe('with no awards', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: [],
+ canAwardEmoji: true,
+ });
+ });
+
+ it('has no award buttons', () => {
+ expect(findAwardButtons().length).toBe(0);
+ });
+ });
+
+ describe('when cannot award emoji', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: [createAward(EMOJI_CACTUS, USERS.root)],
+ canAwardEmoji: false,
+ currentUserId: USERS.marie.id,
+ });
+ });
+
+ it('does not have add button', () => {
+ expect(findAddAwardButton().exists()).toBe(false);
+ });
+ });
+
+ describe('with no user', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: TEST_AWARDS,
+ canAwardEmoji: false,
+ });
+ });
+
+ it('disables award buttons', () => {
+ const buttons = findAwardButtons();
+
+ expect(buttons.length).toBe(7);
+ expect(buttons.wrappers.every(x => x.classes('disabled'))).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap b/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap
index 980e9b517db..e5035614196 100644
--- a/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap
+++ b/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap
@@ -5,8 +5,6 @@ exports[`Title edit field matches the snapshot 1`] = `
label="Title"
label-for="title-field-edit"
>
- <gl-form-input-stub
- id="title-field-edit"
- />
+ <gl-form-input-stub />
</gl-form-group-stub>
`;
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index a2e2d2447d5..2c7fce714f0 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -10,8 +10,7 @@ const DEFAULT_PROPS = {
name: 'Administrator',
location: 'Vienna',
bio: null,
- organization: null,
- jobTitle: null,
+ workInformation: null,
status: null,
},
};
@@ -59,8 +58,7 @@ describe('User Popover Component', () => {
username: null,
location: null,
bio: null,
- organization: null,
- jobTitle: null,
+ workInformation: null,
status: null,
},
},
@@ -93,7 +91,7 @@ describe('User Popover Component', () => {
const findWorkInformation = () => wrapper.find({ ref: 'workInformation' });
const findBio = () => wrapper.find({ ref: 'bio' });
- it('should show only bio if organization and job title are not available', () => {
+ it('should show only bio if work information is not available', () => {
const user = { ...DEFAULT_PROPS.user, bio: 'My super interesting bio' };
createWrapper({ user });
@@ -102,27 +100,10 @@ describe('User Popover Component', () => {
expect(findWorkInformation().exists()).toBe(false);
});
- it('should show only organization if job title is not available', () => {
- const user = { ...DEFAULT_PROPS.user, organization: 'GitLab' };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('GitLab');
- });
-
- it('should show only job title if organization is not available', () => {
- const user = { ...DEFAULT_PROPS.user, jobTitle: 'Frontend Engineer' };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Frontend Engineer');
- });
-
- it('should show organization and job title if they are both available', () => {
+ it('should show work information when it is available', () => {
const user = {
...DEFAULT_PROPS.user,
- organization: 'GitLab',
- jobTitle: 'Frontend Engineer',
+ workInformation: 'Frontend Engineer at GitLab',
};
createWrapper({ user });
@@ -130,17 +111,17 @@ describe('User Popover Component', () => {
expect(findWorkInformation().text()).toBe('Frontend Engineer at GitLab');
});
- it('should display bio and job info in separate lines', () => {
+ it('should display bio and work information in separate lines', () => {
const user = {
...DEFAULT_PROPS.user,
bio: 'My super interesting bio',
- organization: 'GitLab',
+ workInformation: 'Frontend Engineer at GitLab',
};
createWrapper({ user });
expect(findBio().text()).toBe('My super interesting bio');
- expect(findWorkInformation().text()).toBe('GitLab');
+ expect(findWorkInformation().text()).toBe('Frontend Engineer at GitLab');
});
it('should not encode special characters in bio', () => {
@@ -154,40 +135,6 @@ describe('User Popover Component', () => {
expect(findBio().text()).toBe('I like <html> & CSS');
});
- it('should not encode special characters in organization', () => {
- const user = {
- ...DEFAULT_PROPS.user,
- organization: 'Me & my <funky> Company',
- };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Me & my <funky> Company');
- });
-
- it('should not encode special characters in job title', () => {
- const user = {
- ...DEFAULT_PROPS.user,
- jobTitle: 'Manager & Team Lead',
- };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Manager & Team Lead');
- });
-
- it('should not encode special characters when both job title and organization are set', () => {
- const user = {
- ...DEFAULT_PROPS.user,
- jobTitle: 'Manager & Team Lead',
- organization: 'Me & my <funky> Company',
- };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Manager & Team Lead at Me & my <funky> Company');
- });
-
it('shows icon for bio', () => {
const user = {
...DEFAULT_PROPS.user,
@@ -201,10 +148,10 @@ describe('User Popover Component', () => {
);
});
- it('shows icon for organization', () => {
+ it('shows icon for work information', () => {
const user = {
...DEFAULT_PROPS.user,
- organization: 'GitLab',
+ workInformation: 'GitLab',
};
createWrapper({ user });
diff --git a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
new file mode 100644
index 00000000000..c06fbef53b6
--- /dev/null
+++ b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::Metrics::Dashboards::AnnotationResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ context 'user with developer access' do
+ subject(:resolve_annotations) { resolve(described_class, obj: dashboard, args: args, ctx: { current_user: current_user }) }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:environment) { create(:environment) }
+ let_it_be(:path) { 'config/prometheus/common_metrics.yml' }
+ let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: environment) }
+ let(:args) do
+ {
+ from: 10.minutes.ago,
+ to: 5.minutes.ago
+ }
+ end
+
+ before_all do
+ environment.project.add_developer(current_user)
+ end
+
+ context 'with annotation records' do
+ let_it_be(:annotation_1) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 9.minutes.ago, dashboard_path: path) }
+
+ it 'loads annotations using the finder class', :aggregate_failures do
+ expect_next_instance_of(::Metrics::Dashboards::AnnotationsFinder, dashboard: dashboard, params: args) do |finder|
+ expect(finder).to receive(:execute).and_return [annotation_1]
+ end
+
+ expect(resolve_annotations).to eql [annotation_1]
+ end
+
+ context 'dashboard is missing' do
+ let(:dashboard) { nil }
+
+ it 'returns empty array', :aggregate_failures do
+ expect(::Metrics::Dashboards::AnnotationsFinder).not_to receive(:new)
+
+ expect(resolve_annotations).to be_empty
+ end
+ end
+
+ context 'there are no annotations records' do
+ it 'returns empty array' do
+ allow_next_instance_of(::Metrics::Dashboards::AnnotationsFinder) do |finder|
+ allow(finder).to receive(:execute).and_return []
+ end
+
+ expect(resolve_annotations).to be_empty
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/metrics/dashboard_type_spec.rb b/spec/graphql/types/metrics/dashboard_type_spec.rb
index 4795fd77537..76f2b4b8935 100644
--- a/spec/graphql/types/metrics/dashboard_type_spec.rb
+++ b/spec/graphql/types/metrics/dashboard_type_spec.rb
@@ -7,9 +7,16 @@ describe GitlabSchema.types['MetricsDashboard'] do
it 'has the expected fields' do
expected_fields = %w[
- path
- ]
+ path annotations
+ ]
expect(described_class).to have_graphql_fields(*expected_fields)
end
+
+ describe 'annotations field' do
+ subject { described_class.fields['annotations'] }
+
+ it { is_expected.to have_graphql_type(Types::Metrics::Dashboards::AnnotationType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Metrics::Dashboards::AnnotationResolver) }
+ end
end
diff --git a/spec/graphql/types/metrics/dashboards/annotation_type_spec.rb b/spec/graphql/types/metrics/dashboards/annotation_type_spec.rb
new file mode 100644
index 00000000000..2956a2512eb
--- /dev/null
+++ b/spec/graphql/types/metrics/dashboards/annotation_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['MetricsDashboardAnnotation'] do
+ it { expect(described_class.graphql_name).to eq('MetricsDashboardAnnotation') }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ description id panel_id starting_at ending_at
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ it { expect(described_class).to require_graphql_authorizations(:read_metrics_dashboard_annotation) }
+end
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 0068b894474..48acdac74ac 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -64,11 +64,11 @@ describe 'lograge', type: :request do
)
expect(Lograge.formatter).to receive(:call)
- .with(a_hash_including(cpu_s: 0.1111115))
+ .with(a_hash_including(cpu_s: 0.11))
.and_call_original
expect(Lograge.logger).to receive(:send)
- .with(anything, include('"cpu_s":0.1111115'))
+ .with(anything, include('"cpu_s":0.11'))
.and_call_original
subject
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index e5d1d1d690e..d0b54a16747 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -8,6 +8,7 @@ import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual
import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager';
import FilteredSearchManager from '~/filtered_search/filtered_search_manager';
import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
+import { BACKSPACE_KEY_CODE, DELETE_KEY_CODE } from '~/lib/utils/keycodes';
describe('Filtered Search Manager', function() {
let input;
@@ -17,16 +18,35 @@ describe('Filtered Search Manager', function() {
const placeholder = 'Search or filter results...';
function dispatchBackspaceEvent(element, eventType) {
- const backspaceKey = 8;
const event = new Event(eventType);
- event.keyCode = backspaceKey;
+ event.keyCode = BACKSPACE_KEY_CODE;
element.dispatchEvent(event);
}
function dispatchDeleteEvent(element, eventType) {
- const deleteKey = 46;
const event = new Event(eventType);
- event.keyCode = deleteKey;
+ event.keyCode = DELETE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchAltBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.altKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchCtrlBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.ctrlKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchMetaBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.metaKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
element.dispatchEvent(event);
}
@@ -299,6 +319,80 @@ describe('Filtered Search Manager', function() {
});
});
+ describe('checkForAltOrCtrlBackspace', () => {
+ beforeEach(() => {
+ initializeManager();
+ spyOn(FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough();
+ });
+
+ describe('tokens and no input', () => {
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('removes last token via alt-backspace', () => {
+ dispatchAltBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
+ });
+
+ it('removes last token via ctrl-backspace', () => {
+ dispatchCtrlBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
+ });
+ });
+
+ describe('tokens and input', () => {
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('does not remove token or change input via alt-backspace when there is existing input', () => {
+ input = manager.filteredSearchInput;
+ input.value = 'text';
+ dispatchAltBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('text');
+ });
+
+ it('does not remove token or change input via ctrl-backspace when there is existing input', () => {
+ input = manager.filteredSearchInput;
+ input.value = 'text';
+ dispatchCtrlBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('text');
+ });
+ });
+ });
+
+ describe('checkForMetaBackspace', () => {
+ beforeEach(() => {
+ initializeManager();
+ });
+
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('removes all tokens and input', () => {
+ spyOn(FilteredSearchManager.prototype, 'clearSearch').and.callThrough();
+ dispatchMetaBackspaceEvent(input, 'keydown');
+
+ expect(manager.clearSearch).toHaveBeenCalled();
+ expect(manager.filteredSearchInput.value).toEqual('');
+ expect(DropdownUtils.getSearchQuery()).toEqual('');
+ });
+ });
+
describe('removeToken', () => {
beforeEach(() => {
initializeManager();
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
index 6455346e890..0c3193940e6 100644
--- a/spec/javascripts/monitoring/components/dashboard_resize_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
@@ -2,66 +2,13 @@ import Vue from 'vue';
import { createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import * as types from '~/monitoring/stores/mutation_types';
import { createStore } from '~/monitoring/stores';
import axios from '~/lib/utils/axios_utils';
-import {
- metricsDashboardPayload,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
- mockApiEndpoint,
- environmentData,
-} from '../mock_data';
+import { mockApiEndpoint, propsData } from '../mock_data';
+import { metricsDashboardPayload } from '../fixture_data';
+import { setupStoreWithData } from '../store_utils';
const localVue = createLocalVue();
-const propsData = {
- hasMetrics: false,
- documentationPath: '/path/to/docs',
- settingsPath: '/path/to/settings',
- clustersPath: '/path/to/clusters',
- tagsPath: '/path/to/tags',
- projectPath: '/path/to/project',
- defaultBranch: 'master',
- metricsEndpoint: mockApiEndpoint,
- deploymentsEndpoint: null,
- emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
- emptyLoadingSvgPath: '/path/to/loading.svg',
- emptyNoDataSvgPath: '/path/to/no-data.svg',
- emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
- emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- currentEnvironmentName: 'production',
- customMetricsAvailable: false,
- customMetricsPath: '',
- validateQueryPath: '',
-};
-
-function setupComponentStore(component) {
- // Load 2 panel groups
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
-
- // Load 3 panels to the dashboard, one with an empty result
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedEmptyResult,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayloadCoresTotal,
- );
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-}
describe('Dashboard', () => {
let DashboardComponent;
@@ -109,7 +56,7 @@ describe('Dashboard', () => {
store,
});
- setupComponentStore(component);
+ setupStoreWithData(component.$store);
return Vue.nextTick().then(() => {
[promPanel] = component.$el.querySelectorAll('.prometheus-panel');
diff --git a/spec/javascripts/monitoring/fixture_data.js b/spec/javascripts/monitoring/fixture_data.js
new file mode 100644
index 00000000000..1375c27cdde
--- /dev/null
+++ b/spec/javascripts/monitoring/fixture_data.js
@@ -0,0 +1 @@
+export * from '../../frontend/monitoring/fixture_data';
diff --git a/spec/javascripts/monitoring/store_utils.js b/spec/javascripts/monitoring/store_utils.js
new file mode 100644
index 00000000000..1222716c829
--- /dev/null
+++ b/spec/javascripts/monitoring/store_utils.js
@@ -0,0 +1 @@
+export * from '../../frontend/monitoring/store_utils';
diff --git a/spec/javascripts/pipelines/graph/graph_component_spec.js b/spec/javascripts/pipelines/graph/graph_component_spec.js
deleted file mode 100644
index d2c10362ba3..00000000000
--- a/spec/javascripts/pipelines/graph/graph_component_spec.js
+++ /dev/null
@@ -1,274 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import PipelineStore from '~/pipelines/stores/pipeline_store';
-import graphComponent from '~/pipelines/components/graph/graph_component.vue';
-import graphJSON from './mock_data';
-import linkedPipelineJSON from '../linked_pipelines_mock.json';
-import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
-
-describe('graph component', () => {
- const GraphComponent = Vue.extend(graphComponent);
- const store = new PipelineStore();
- store.storePipeline(linkedPipelineJSON);
- const mediator = new PipelinesMediator({ endpoint: '' });
-
- let component;
-
- beforeEach(() => {
- setFixtures(`
- <div class="layout-page"></div>
- `);
- });
-
- afterEach(() => {
- component.$destroy();
- });
-
- describe('while is loading', () => {
- it('should render a loading icon', () => {
- component = mountComponent(GraphComponent, {
- isLoading: true,
- pipeline: {},
- mediator,
- });
-
- expect(component.$el.querySelector('.loading-icon')).toBeDefined();
- });
- });
-
- describe('with data', () => {
- it('should render the graph', () => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: graphJSON,
- mediator,
- });
-
- expect(component.$el.classList.contains('js-pipeline-graph')).toEqual(true);
-
- expect(
- component.$el.querySelector('.stage-column:first-child').classList.contains('no-margin'),
- ).toEqual(true);
-
- expect(
- component.$el.querySelector('.stage-column:nth-child(2)').classList.contains('left-margin'),
- ).toEqual(true);
-
- expect(
- component.$el
- .querySelector('.stage-column:nth-child(2) .build:nth-child(1)')
- .classList.contains('left-connector'),
- ).toEqual(true);
-
- expect(component.$el.querySelector('loading-icon')).toBe(null);
-
- expect(component.$el.querySelector('.stage-column-list')).toBeDefined();
- });
- });
-
- describe('when linked pipelines are present', () => {
- beforeEach(() => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
- });
-
- describe('rendered output', () => {
- it('should include the pipelines graph', () => {
- expect(component.$el.classList.contains('js-pipeline-graph')).toEqual(true);
- });
-
- it('should not include the loading icon', () => {
- expect(component.$el.querySelector('.fa-spinner')).toBeNull();
- });
-
- it('should include the stage column list', () => {
- expect(component.$el.querySelector('.stage-column-list')).not.toBeNull();
- });
-
- it('should include the no-margin class on the first child', () => {
- const firstStageColumnElement = component.$el.querySelector(
- '.stage-column-list .stage-column',
- );
-
- expect(firstStageColumnElement.classList.contains('no-margin')).toEqual(true);
- });
-
- it('should include the has-only-one-job class on the first child', () => {
- const firstStageColumnElement = component.$el.querySelector(
- '.stage-column-list .stage-column',
- );
-
- expect(firstStageColumnElement.classList.contains('has-only-one-job')).toEqual(true);
- });
-
- it('should include the left-margin class on the second child', () => {
- const firstStageColumnElement = component.$el.querySelector(
- '.stage-column-list .stage-column:last-child',
- );
-
- expect(firstStageColumnElement.classList.contains('left-margin')).toEqual(true);
- });
-
- it('should include the js-has-linked-pipelines flag', () => {
- expect(component.$el.querySelector('.js-has-linked-pipelines')).not.toBeNull();
- });
- });
-
- describe('computeds and methods', () => {
- describe('capitalizeStageName', () => {
- it('it capitalizes the stage name', () => {
- expect(component.capitalizeStageName('mystage')).toBe('Mystage');
- });
- });
-
- describe('stageConnectorClass', () => {
- it('it returns left-margin when there is a triggerer', () => {
- expect(component.stageConnectorClass(0, { groups: ['job'] })).toBe('no-margin');
- });
- });
- });
-
- describe('linked pipelines components', () => {
- beforeEach(() => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
- });
-
- it('should render an upstream pipelines column', () => {
- expect(component.$el.querySelector('.linked-pipelines-column')).not.toBeNull();
- expect(component.$el.innerHTML).toContain('Upstream');
- });
-
- it('should render a downstream pipelines column', () => {
- expect(component.$el.querySelector('.linked-pipelines-column')).not.toBeNull();
- expect(component.$el.innerHTML).toContain('Downstream');
- });
-
- describe('triggered by', () => {
- describe('on click', () => {
- it('should emit `onClickTriggeredBy` when triggered by linked pipeline is clicked', () => {
- spyOn(component, '$emit');
-
- component.$el.querySelector('#js-linked-pipeline-12').click();
-
- expect(component.$emit).toHaveBeenCalledWith(
- 'onClickTriggeredBy',
- component.pipeline.triggered_by[0],
- );
- });
- });
-
- describe('with expanded pipeline', () => {
- it('should render expanded pipeline', done => {
- // expand the pipeline
- store.state.pipeline.triggered_by[0].isExpanded = true;
-
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
-
- Vue.nextTick()
- .then(() => {
- expect(component.$el.querySelector('.js-upstream-pipeline-12')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
- describe('triggered', () => {
- describe('on click', () => {
- it('should emit `onClickTriggered`', () => {
- spyOn(component, '$emit');
- spyOn(component, 'calculateMarginTop').and.callFake(() => '16px');
-
- component.$el.querySelector('#js-linked-pipeline-34993051').click();
-
- expect(component.$emit).toHaveBeenCalledWith(
- 'onClickTriggered',
- component.pipeline.triggered[0],
- );
- });
- });
-
- describe('with expanded pipeline', () => {
- it('should render expanded pipeline', done => {
- // expand the pipeline
- store.state.pipeline.triggered[0].isExpanded = true;
-
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
-
- Vue.nextTick()
- .then(() => {
- expect(
- component.$el.querySelector('.js-downstream-pipeline-34993051'),
- ).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
- });
- });
-
- describe('when linked pipelines are not present', () => {
- beforeEach(() => {
- const pipeline = Object.assign(linkedPipelineJSON, { triggered: null, triggered_by: null });
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline,
- mediator,
- });
- });
-
- describe('rendered output', () => {
- it('should include the first column with a no margin', () => {
- const firstColumn = component.$el.querySelector('.stage-column:first-child');
-
- expect(firstColumn.classList.contains('no-margin')).toEqual(true);
- });
-
- it('should not render a linked pipelines column', () => {
- expect(component.$el.querySelector('.linked-pipelines-column')).toBeNull();
- });
- });
-
- describe('stageConnectorClass', () => {
- it('it returns left-margin when no triggerer and there is one job', () => {
- expect(component.stageConnectorClass(0, { groups: ['job'] })).toBe('no-margin');
- });
-
- it('it returns left-margin when no triggerer and not the first stage', () => {
- expect(component.stageConnectorClass(99, { groups: ['job'] })).toBe('left-margin');
- });
- });
- });
-
- describe('capitalizeStageName', () => {
- it('capitalizes and escapes stage name', () => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: graphJSON,
- mediator,
- });
-
- expect(
- component.$el.querySelector('.stage-column:nth-child(2) .stage-name').textContent.trim(),
- ).toEqual('Deploy &lt;img src=x onerror=alert(document.domain)&gt;');
- });
- });
-});
diff --git a/spec/javascripts/pipelines/graph/job_name_component_spec.js b/spec/javascripts/pipelines/graph/job_name_component_spec.js
deleted file mode 100644
index c861d452dd0..00000000000
--- a/spec/javascripts/pipelines/graph/job_name_component_spec.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import Vue from 'vue';
-import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue';
-
-describe('job name component', () => {
- let component;
-
- beforeEach(() => {
- const JobNameComponent = Vue.extend(jobNameComponent);
- component = new JobNameComponent({
- propsData: {
- name: 'foo',
- status: {
- icon: 'status_success',
- },
- },
- }).$mount();
- });
-
- it('should render the provided name', () => {
- expect(component.$el.querySelector('.ci-status-text').textContent.trim()).toEqual('foo');
- });
-
- it('should render an icon with the provided status', () => {
- expect(component.$el.querySelector('.ci-status-icon-success')).toBeDefined();
- expect(component.$el.querySelector('.ci-status-icon-success svg')).toBeDefined();
- });
-});
diff --git a/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js b/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
deleted file mode 100644
index 613ab2a906f..00000000000
--- a/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
-import mockData from './linked_pipelines_mock_data';
-
-describe('Linked Pipelines Column', () => {
- const Component = Vue.extend(LinkedPipelinesColumn);
- const props = {
- columnTitle: 'Upstream',
- linkedPipelines: mockData.triggered,
- graphPosition: 'right',
- projectId: 19,
- };
- let vm;
-
- beforeEach(() => {
- vm = mountComponent(Component, props);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders the pipeline orientation', () => {
- const titleElement = vm.$el.querySelector('.linked-pipelines-column-title');
-
- expect(titleElement.innerText).toContain(props.columnTitle);
- });
-
- it('has the correct number of linked pipeline child components', () => {
- expect(vm.$children.length).toBe(props.linkedPipelines.length);
- });
-
- it('renders the correct number of linked pipelines', () => {
- const linkedPipelineElements = vm.$el.querySelectorAll('.linked-pipeline');
-
- expect(linkedPipelineElements.length).toBe(props.linkedPipelines.length);
- });
-
- it('renders cross project triangle when column is upstream', () => {
- expect(vm.$el.querySelector('.cross-project-triangle')).toBeDefined();
- });
-});
diff --git a/spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js b/spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js
deleted file mode 100644
index 3079d5e4e68..00000000000
--- a/spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js
+++ /dev/null
@@ -1,3 +0,0 @@
-import mockData from '../../../frontend/pipelines/graph/linked_pipelines_mock_data';
-
-export default mockData;
diff --git a/spec/javascripts/pipelines/graph/stage_column_component_spec.js b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
deleted file mode 100644
index dbfeeae43fe..00000000000
--- a/spec/javascripts/pipelines/graph/stage_column_component_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
-
-describe('stage column component', () => {
- let component;
- const StageColumnComponent = Vue.extend(stageColumnComponent);
-
- const mockJob = {
- id: 4250,
- name: 'test',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- details_path: '/root/ci-mock/builds/4250',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4250/retry',
- method: 'post',
- },
- },
- };
-
- beforeEach(() => {
- const mockGroups = [];
- for (let i = 0; i < 3; i += 1) {
- const mockedJob = Object.assign({}, mockJob);
- mockedJob.id += i;
- mockGroups.push(mockedJob);
- }
-
- component = mountComponent(StageColumnComponent, {
- title: 'foo',
- groups: mockGroups,
- hasTriggeredBy: false,
- });
- });
-
- it('should render provided title', () => {
- expect(component.$el.querySelector('.stage-name').textContent.trim()).toEqual('foo');
- });
-
- it('should render the provided groups', () => {
- expect(component.$el.querySelectorAll('.builds-container > ul > li').length).toEqual(3);
- });
-
- describe('jobId', () => {
- it('escapes job name', () => {
- component = mountComponent(StageColumnComponent, {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- });
-
- expect(component.$el.querySelector('.builds-container li').getAttribute('id')).toEqual(
- 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
- );
- });
- });
-
- describe('with action', () => {
- it('renders action button', () => {
- component = mountComponent(StageColumnComponent, {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- action: {
- icon: 'play',
- title: 'Play all',
- path: 'action',
- },
- });
-
- expect(component.$el.querySelector('.js-stage-action')).not.toBeNull();
- });
- });
-
- describe('without action', () => {
- it('does not render action button', () => {
- component = mountComponent(StageColumnComponent, {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- });
-
- expect(component.$el.querySelector('.js-stage-action')).toBeNull();
- });
- });
-});
diff --git a/spec/javascripts/sidebar/sidebar_assignees_spec.js b/spec/javascripts/sidebar/sidebar_assignees_spec.js
deleted file mode 100644
index 23b8dc69925..00000000000
--- a/spec/javascripts/sidebar/sidebar_assignees_spec.js
+++ /dev/null
@@ -1,64 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees.vue';
-import SidebarMediator from '~/sidebar/sidebar_mediator';
-import SidebarService from '~/sidebar/services/sidebar_service';
-import SidebarStore from '~/sidebar/stores/sidebar_store';
-import Mock from './mock_data';
-
-describe('sidebar assignees', () => {
- let vm;
- let mediator;
- let sidebarAssigneesEl;
- preloadFixtures('issues/open-issue.html');
-
- beforeEach(() => {
- loadFixtures('issues/open-issue.html');
-
- mediator = new SidebarMediator(Mock.mediator);
- spyOn(mediator, 'saveAssignees').and.callThrough();
- spyOn(mediator, 'assignYourself').and.callThrough();
-
- const SidebarAssigneeComponent = Vue.extend(SidebarAssignees);
- sidebarAssigneesEl = document.querySelector('#js-vue-sidebar-assignees');
- vm = mountComponent(
- SidebarAssigneeComponent,
- {
- mediator,
- field: sidebarAssigneesEl.dataset.field,
- },
- sidebarAssigneesEl,
- );
- });
-
- afterEach(() => {
- SidebarService.singleton = null;
- SidebarStore.singleton = null;
- SidebarMediator.singleton = null;
- });
-
- it('calls the mediator when saves the assignees', () => {
- vm.saveAssignees();
-
- expect(mediator.saveAssignees).toHaveBeenCalled();
- });
-
- it('calls the mediator when "assignSelf" method is called', () => {
- vm.assignSelf();
-
- expect(mediator.assignYourself).toHaveBeenCalled();
- expect(mediator.store.assignees.length).toEqual(1);
- });
-
- it('hides assignees until fetched', done => {
- const currentAssignee = sidebarAssigneesEl.querySelector('.value');
-
- expect(currentAssignee).toBe(null);
-
- vm.store.isFetching.assignees = false;
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.value')).toBeVisible();
- done();
- });
- });
-});
diff --git a/spec/lib/api/entities/project_import_failed_relation_spec.rb b/spec/lib/api/entities/project_import_failed_relation_spec.rb
new file mode 100644
index 00000000000..f8330713480
--- /dev/null
+++ b/spec/lib/api/entities/project_import_failed_relation_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Entities::ProjectImportFailedRelation do
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ let(:import_failure) { build(:import_failure) }
+ let(:entity) { described_class.new(import_failure) }
+
+ it 'includes basic fields', :aggregate_failures do
+ expect(subject).to eq(
+ id: import_failure.id,
+ created_at: import_failure.created_at,
+ exception_class: import_failure.exception_class,
+ exception_message: import_failure.exception_message,
+ relation_name: import_failure.relation_key,
+ source: import_failure.source
+ )
+ end
+ end
+end
diff --git a/spec/lib/api/entities/project_import_status_spec.rb b/spec/lib/api/entities/project_import_status_spec.rb
new file mode 100644
index 00000000000..650f9c156a3
--- /dev/null
+++ b/spec/lib/api/entities/project_import_status_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Entities::ProjectImportStatus do
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ let(:correlation_id) { 'cid' }
+
+ context 'when import has not finished yet' do
+ let(:project) { create(:project, :import_scheduled, import_correlation_id: correlation_id) }
+ let(:entity) { described_class.new(project) }
+
+ it 'includes basic fields and no failures', :aggregate_failures do
+ expect(subject[:import_status]).to eq('scheduled')
+ expect(subject[:correlation_id]).to eq(correlation_id)
+ expect(subject[:import_error]).to be_nil
+ expect(subject[:failed_relations]).to eq([])
+ end
+ end
+
+ context 'when import has finished with failed relations' do
+ let(:project) { create(:project, :import_finished, import_correlation_id: correlation_id) }
+ let(:entity) { described_class.new(project) }
+
+ it 'includes basic fields with failed relations', :aggregate_failures do
+ create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id)
+
+ expect(subject[:import_status]).to eq('finished')
+ expect(subject[:correlation_id]).to eq(correlation_id)
+ expect(subject[:import_error]).to be_nil
+ expect(subject[:failed_relations]).not_to be_empty
+ end
+ end
+
+ context 'when import has failed' do
+ let(:project) { create(:project, :import_failed, import_correlation_id: correlation_id, import_last_error: 'error') }
+ let(:entity) { described_class.new(project) }
+
+ it 'includes basic fields with import error', :aggregate_failures do
+ expect(subject[:import_status]).to eq('failed')
+ expect(subject[:correlation_id]).to eq(correlation_id)
+ expect(subject[:import_error]).to eq('error')
+ expect(subject[:failed_relations]).to eq([])
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
new file mode 100644
index 00000000000..20524b197e0
--- /dev/null
+++ b/spec/lib/api/entities/user_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Entities::User do
+ let(:user) { create(:user) }
+ let(:current_user) { create(:user) }
+
+ subject { described_class.new(user, current_user: current_user).as_json }
+
+ it 'exposes correct attributes' do
+ expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information)
+ end
+
+ it 'exposes created_at if the current user can read the user profile' do
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, user).and_return(true)
+
+ expect(subject).to include(:created_at)
+ end
+
+ it 'does not expose created_at if the current user cannot read the user profile' do
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, user).and_return(false)
+
+ expect(subject).not_to include(:created_at)
+ end
+end
diff --git a/spec/lib/api/validations/validators/limit_spec.rb b/spec/lib/api/validations/validators/limit_spec.rb
new file mode 100644
index 00000000000..600f74e1fb2
--- /dev/null
+++ b/spec/lib/api/validations/validators/limit_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Validations::Validators::Limit do
+ include ApiValidatorsHelpers
+
+ subject do
+ described_class.new(['test'], 255, false, scope.new)
+ end
+
+ context 'valid limit param' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => '123-456')
+ expect_no_validation_error('test' => '00000000-ffff-0000-ffff-000000000000')
+ expect_no_validation_error('test' => "#{'a' * 255}")
+ end
+ end
+
+ context 'longer than limit param' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => "#{'a' * 256}")
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline_spec.rb b/spec/lib/banzai/pipeline_spec.rb
new file mode 100644
index 00000000000..eeff7287ff5
--- /dev/null
+++ b/spec/lib/banzai/pipeline_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Pipeline do
+ describe '.[]' do
+ subject { described_class[name] }
+
+ shared_examples 'error' do |exception, message|
+ it do
+ expect { subject }.to raise_error(exception, message)
+ end
+ end
+
+ context 'for nil' do
+ let(:name) { nil }
+
+ it { is_expected.to eq(Banzai::Pipeline::FullPipeline) }
+ end
+
+ context 'for symbols' do
+ context 'when known' do
+ let(:name) { :full }
+
+ it { is_expected.to eq(Banzai::Pipeline::FullPipeline) }
+ end
+
+ context 'when unknown' do
+ let(:name) { :unknown }
+
+ it_behaves_like 'error', NameError,
+ 'uninitialized constant Banzai::Pipeline::UnknownPipeline'
+ end
+ end
+
+ context 'for classes' do
+ let(:name) { klass }
+
+ context 'subclassing Banzai::Pipeline::BasePipeline' do
+ let(:klass) { Class.new(Banzai::Pipeline::BasePipeline) }
+
+ it { is_expected.to eq(klass) }
+ end
+
+ context 'subclassing other types' do
+ let(:klass) { Class.new(Banzai::RenderContext) }
+
+ before do
+ stub_const('Foo', klass)
+ end
+
+ it_behaves_like 'error', ArgumentError,
+ 'unsupported pipeline name Foo (Class)'
+ end
+ end
+
+ context 'for other types' do
+ let(:name) { 'label' }
+
+ it_behaves_like 'error', ArgumentError,
+ 'unsupported pipeline name "label" (String)'
+ end
+ end
+end
diff --git a/spec/lib/csv_builder_spec.rb b/spec/lib/csv_builder_spec.rb
new file mode 100644
index 00000000000..0d5e2b81b16
--- /dev/null
+++ b/spec/lib/csv_builder_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe CsvBuilder do
+ let(:object) { double(question: :answer) }
+ let(:fake_relation) { FakeRelation.new([object]) }
+ let(:subject) { described_class.new(fake_relation, 'Q & A' => :question, 'Reversed' => -> (o) { o.question.to_s.reverse }) }
+ let(:csv_data) { subject.render }
+
+ before do
+ stub_const('FakeRelation', Array)
+
+ FakeRelation.class_eval do
+ def find_each(&block)
+ each(&block)
+ end
+ end
+ end
+
+ it 'generates a csv' do
+ expect(csv_data.scan(/(,|\n)/).join).to include ",\n,"
+ end
+
+ it 'uses a temporary file to reduce memory allocation' do
+ expect(CSV).to receive(:new).with(instance_of(Tempfile)).and_call_original
+
+ subject.render
+ end
+
+ it 'counts the number of rows' do
+ subject.render
+
+ expect(subject.rows_written).to eq 1
+ end
+
+ describe 'rows_expected' do
+ it 'uses rows_written if CSV rendered successfully' do
+ subject.render
+
+ expect(fake_relation).not_to receive(:count)
+ expect(subject.rows_expected).to eq 1
+ end
+
+ it 'falls back to calling .count before rendering begins' do
+ expect(subject.rows_expected).to eq 1
+ end
+ end
+
+ describe 'truncation' do
+ let(:big_object) { double(question: 'Long' * 1024) }
+ let(:row_size) { big_object.question.length * 2 }
+ let(:fake_relation) { FakeRelation.new([big_object, big_object, big_object]) }
+
+ it 'occurs after given number of bytes' do
+ expect(subject.render(row_size * 2).length).to be_between(row_size * 2, row_size * 3)
+ expect(subject).to be_truncated
+ expect(subject.rows_written).to eq 2
+ end
+
+ it 'is ignored by default' do
+ expect(subject.render.length).to be > row_size * 3
+ expect(subject.rows_written).to eq 3
+ end
+
+ it 'causes rows_expected to fall back to .count' do
+ subject.render(0)
+
+ expect(fake_relation).to receive(:count).and_call_original
+ expect(subject.rows_expected).to eq 3
+ end
+ end
+
+ it 'avoids loading all data in a single query' do
+ expect(fake_relation).to receive(:find_each)
+
+ subject.render
+ end
+
+ it 'uses hash keys as headers' do
+ expect(csv_data).to start_with 'Q & A'
+ end
+
+ it 'gets data by calling method provided as hash value' do
+ expect(csv_data).to include 'answer'
+ end
+
+ it 'allows lambdas to look up more complicated data' do
+ expect(csv_data).to include 'rewsna'
+ end
+
+ describe 'excel sanitization' do
+ let(:dangerous_title) { double(title: "=cmd|' /C calc'!A0 title", description: "*safe_desc") }
+ let(:dangerous_desc) { double(title: "*safe_title", description: "=cmd|' /C calc'!A0 desc") }
+ let(:fake_relation) { FakeRelation.new([dangerous_title, dangerous_desc]) }
+ let(:subject) { described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') }
+ let(:csv_data) { subject.render }
+
+ it 'sanitizes dangerous characters at the beginning of a column' do
+ expect(csv_data).to include "'=cmd|' /C calc'!A0 title"
+ expect(csv_data).to include "'=cmd|' /C calc'!A0 desc"
+ end
+
+ it 'does not sanitize safe symbols at the beginning of a column' do
+ expect(csv_data).not_to include "'*safe_desc"
+ expect(csv_data).not_to include "'*safe_title"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 0903ca6f9e8..6674ea059a0 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -42,6 +42,18 @@ describe Gitlab::ApplicationContext do
end
end
+ describe '.current_context_include?' do
+ it 'returns true if the key was present in the context' do
+ described_class.with_context(caller_id: "Hello") do
+ expect(described_class.current_context_include?(:caller_id)).to be(true)
+ end
+ end
+
+ it 'returns false if the key was not present in the current context' do
+ expect(described_class.current_context_include?(:caller_id)).to be(false)
+ end
+ end
+
describe '#to_lazy_hash' do
let(:user) { build(:user) }
let(:project) { build(:project) }
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
index ff8b9dd1005..d4f52a11ce7 100644
--- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
+++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
@@ -79,7 +79,7 @@ describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, s
context 'migrate commit mentions' do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:note1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions) }
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
new file mode 100644
index 00000000000..f2897708b08
--- /dev/null
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Jwt do
+ let(:namespace) { build_stubbed(:namespace) }
+ let(:project) { build_stubbed(:project, namespace: namespace) }
+ let(:user) { build_stubbed(:user) }
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'auto-deploy-2020-03-19') }
+ let(:build) do
+ build_stubbed(
+ :ci_build,
+ project: project,
+ user: user,
+ pipeline: pipeline
+ )
+ end
+
+ describe '#payload' do
+ subject(:payload) { described_class.new(build, ttl: 30).payload }
+
+ it 'has correct values for the standard JWT attributes' do
+ Timecop.freeze do
+ now = Time.now.to_i
+
+ aggregate_failures do
+ expect(payload[:iss]).to eq(Settings.gitlab.host)
+ expect(payload[:iat]).to eq(now)
+ expect(payload[:exp]).to eq(now + 30)
+ expect(payload[:sub]).to eq("job_#{build.id}")
+ end
+ end
+ end
+
+ it 'has correct values for the custom attributes' do
+ aggregate_failures do
+ expect(payload[:namespace_id]).to eq(namespace.id.to_s)
+ expect(payload[:namespace_path]).to eq(namespace.full_path)
+ expect(payload[:project_id]).to eq(project.id.to_s)
+ expect(payload[:project_path]).to eq(project.full_path)
+ expect(payload[:user_id]).to eq(user.id.to_s)
+ expect(payload[:user_email]).to eq(user.email)
+ expect(payload[:user_login]).to eq(user.username)
+ expect(payload[:pipeline_id]).to eq(pipeline.id.to_s)
+ expect(payload[:job_id]).to eq(build.id.to_s)
+ expect(payload[:ref]).to eq(pipeline.source_ref)
+ end
+ end
+
+ it 'skips user related custom attributes if build has no user assigned' do
+ allow(build).to receive(:user).and_return(nil)
+
+ expect { payload }.not_to raise_error
+ end
+
+ describe 'ref type' do
+ context 'branches' do
+ it 'is "branch"' do
+ expect(payload[:ref_type]).to eq('branch')
+ end
+ end
+
+ context 'tags' do
+ let(:build) { build_stubbed(:ci_build, :on_tag, project: project) }
+
+ it 'is "tag"' do
+ expect(payload[:ref_type]).to eq('tag')
+ end
+ end
+
+ context 'merge requests' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, :detached_merge_request_pipeline) }
+
+ it 'is "branch"' do
+ expect(payload[:ref_type]).to eq('branch')
+ end
+ end
+ end
+
+ describe 'ref_protected' do
+ it 'is false when ref is not protected' do
+ expect(build).to receive(:protected).and_return(false)
+
+ expect(payload[:ref_protected]).to eq('false')
+ end
+
+ it 'is true when ref is protected' do
+ expect(build).to receive(:protected).and_return(true)
+
+ expect(payload[:ref_protected]).to eq('true')
+ end
+ end
+ end
+
+ describe '.for_build' do
+ let(:rsa_key) { OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key) }
+
+ subject(:jwt) { described_class.for_build(build) }
+
+ it 'generates JWT with key id' do
+ _payload, headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(headers['kid']).to eq(rsa_key.public_key.to_jwk['kid'])
+ end
+
+ it 'generates JWT for the given job with ttl equal to build timeout' do
+ expect(build).to receive(:metadata_timeout).and_return(3_600)
+
+ payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ ttl = payload["exp"] - payload["iat"]
+
+ expect(ttl).to eq(3_600)
+ end
+
+ it 'generates JWT for the given job with default ttl if build timeout is not set' do
+ expect(build).to receive(:metadata_timeout).and_return(nil)
+
+ payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ ttl = payload["exp"] - payload["iat"]
+
+ expect(ttl).to eq(5.minutes.to_i)
+ end
+ end
+end
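The spec above pins down both halves of the new Gitlab::Ci::Jwt class: #payload assembles the claims (standard iss/iat/exp/sub plus the CI-specific namespace, project, user, pipeline, job and ref attributes), and .for_build signs them with the instance's OpenID Connect signing key using RS256, exposing the key id in the JWT header. A minimal verification sketch along the lines of what the spec itself does (assumes a Rails console with a build at hand; not a documented API):

    # Sketch only: decode a CI job JWT the way the spec does.
    rsa_key = OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key)
    token   = Gitlab::Ci::Jwt.for_build(build)

    payload, headers = JWT.decode(token, rsa_key.public_key, true, { algorithm: 'RS256' })
    headers['kid']                   # key id derived from the public key's JWK
    payload['exp'] - payload['iat']  # build timeout if set, otherwise 5 minutes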
diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
new file mode 100644
index 00000000000..1f417781988
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Status::Bridge::Factory do
+ let(:user) { create(:user) }
+ let(:project) { bridge.project }
+ let(:status) { factory.fabricate! }
+ let(:factory) { described_class.new(bridge, user) }
+
+ before do
+ stub_not_protect_default_branch
+
+ project.add_developer(user)
+ end
+
+ context 'when bridge is created' do
+ let(:bridge) { create(:ci_bridge) }
+
+ it 'matches correct core status' do
+ expect(factory.core_status).to be_a Gitlab::Ci::Status::Created
+ end
+
+ it 'fabricates status with correct details' do
+ expect(status.text).to eq s_('CiStatusText|created')
+ expect(status.icon).to eq 'status_created'
+ expect(status.favicon).to eq 'favicon_status_created'
+ expect(status.label).to be_nil
+ expect(status).not_to have_details
+ expect(status).not_to have_action
+ end
+ end
+
+ context 'when bridge is failed' do
+ let(:bridge) { create(:ci_bridge, :failed) }
+
+ it 'matches correct core status' do
+ expect(factory.core_status).to be_a Gitlab::Ci::Status::Failed
+ end
+
+ it 'matches correct extended statuses' do
+ expect(factory.extended_statuses)
+ .to eq [Gitlab::Ci::Status::Bridge::Failed]
+ end
+
+ it 'fabricates a failed bridge status' do
+ expect(status).to be_a Gitlab::Ci::Status::Bridge::Failed
+ end
+
+ it 'fabricates status with correct details' do
+ expect(status.text).to eq s_('CiStatusText|failed')
+ expect(status.icon).to eq 'status_failed'
+ expect(status.favicon).to eq 'favicon_status_failed'
+ expect(status.label).to be_nil
+ expect(status.status_tooltip).to eq "#{s_('CiStatusText|failed')} - (unknown failure)"
+ expect(status).not_to have_details
+ expect(status).not_to have_action
+ end
+
+ context 'failed with downstream_pipeline_creation_failed' do
+ before do
+ bridge.failure_reason = 'downstream_pipeline_creation_failed'
+ end
+
+ it 'fabricates correct status_tooltip' do
+ expect(status.status_tooltip).to eq(
+ "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created)"
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index adbd7eabd18..bfd9980ee9c 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -49,20 +49,16 @@ describe Gitlab::CurrentSettings do
end
end
- context 'with DB unavailable' do
- context 'and settings in cache' do
- include_context 'with settings in cache'
-
- it 'fetches the settings from cache without issuing any query' do
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
+ context 'in a Rake task with DB unavailable' do
+ before do
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
+ # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
+ # during the initialization phase of the test suite, so instead let's mock the internals of it
+ allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
end
context 'and no settings in cache' do
before do
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- allow(ActiveRecord::Base.connection).to receive(:active?).and_return(false)
expect(ApplicationSetting).not_to receive(:current)
end
@@ -185,17 +181,6 @@ describe Gitlab::CurrentSettings do
expect(described_class.current_application_settings).to eq(:current_settings)
end
end
-
- context 'when the application_settings table does not exist' do
- it 'returns a FakeApplicationSettings object' do
- expect(Gitlab::Database)
- .to receive(:cached_table_exists?)
- .with('application_settings')
- .and_return(false)
-
- expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
index 664009f140f..2242895f8ea 100644
--- a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
@@ -20,7 +20,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds the number of issues created after it" do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
context 'with subgroups' do
@@ -29,7 +29,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds issues from them" do
- expect(subject.first[:value]).to eq(3)
+ expect(subject.first[:value]).to eq('3')
end
end
@@ -41,7 +41,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: Time.now, current_user: user, projects: [project.id, project_2.id] }).data }
it 'finds issues from those projects' do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
end
@@ -49,7 +49,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data }
it 'finds issues from 5 days ago' do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
end
end
@@ -62,7 +62,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "doesn't find issues from them" do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
end
end
@@ -77,7 +77,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds the number of deploys made created after it" do
- expect(subject.second[:value]).to eq(2)
+ expect(subject.second[:value]).to eq('2')
end
context 'with subgroups' do
@@ -88,7 +88,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds deploys from them" do
- expect(subject.second[:value]).to eq(3)
+ expect(subject.second[:value]).to eq('3')
end
end
@@ -102,7 +102,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: Time.now, current_user: user, projects: [project.id, project_2.id] }).data }
it 'shows deploys from those projects' do
- expect(subject.second[:value]).to eq(2)
+ expect(subject.second[:value]).to eq('2')
end
end
@@ -110,7 +110,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data }
it 'finds deployments from 5 days ago' do
- expect(subject.second[:value]).to eq(2)
+ expect(subject.second[:value]).to eq('2')
end
end
end
@@ -123,7 +123,53 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "doesn't find deploys from them" do
- expect(subject.second[:value]).to eq(0)
+ expect(subject.second[:value]).to eq('-')
+ end
+ end
+ end
+
+ describe '#deployment_frequency' do
+ let(:from) { 6.days.ago }
+ let(:to) { nil }
+
+ subject do
+ described_class.new(group, options: {
+ from: from,
+ to: to,
+ current_user: user
+ }).data.third
+ end
+
+ it 'includes the unit: `per day`' do
+ expect(subject[:unit]).to eq(_('per day'))
+ end
+
+ before do
+ Timecop.freeze(5.days.ago) do
+ create(:deployment, :success, project: project)
+ end
+ end
+
+ context 'when `to` is nil' do
+ it 'includes range until now' do
+ # 1 deployment over 7 days
+ expect(subject[:value]).to eq('0.1')
+ end
+ end
+
+ context 'when `to` is given' do
+ let(:from) { 10.days.ago }
+ let(:to) { 10.days.from_now }
+
+ before do
+ Timecop.freeze(5.days.from_now) do
+ create(:deployment, :success, project: project)
+ end
+ end
+
+ it 'returns deployment frequency within `from` and `to` range' do
+ # 2 deployments over 20 days
+ expect(subject[:value]).to eq('0.1')
end
end
end
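The expected strings in the new #deployment_frequency examples follow from the inline comments: the number of successful deployments divided by the number of days in the requested range, rounded to one decimal place and rendered as a string ('-' when there is nothing to count). A rough sketch of that arithmetic, inferred from the comments rather than from the implementation:

    (1 / 7.0).round(1)   # => 0.1  (1 deployment over 7 days)
    (2 / 20.0).round(1)  # => 0.1  (2 deployments over 20 days)
    (1 / 30.0).round(1)  # => 0.0  (rendered as '0' in the stage summary spec below)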
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index 94edef20296..a86278871ff 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -20,13 +20,13 @@ describe Gitlab::CycleAnalytics::StageSummary do
Timecop.freeze(5.days.ago) { create(:issue, project: project) }
Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
- expect(subject).to eq(1)
+ expect(subject).to eq('1')
end
it "doesn't find issues from other projects" do
Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project)) }
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
context 'when `to` parameter is given' do
@@ -38,14 +38,14 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq(2)
+ expect(subject).to eq('2')
end
end
end
@@ -57,19 +57,19 @@ describe Gitlab::CycleAnalytics::StageSummary do
Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
- expect(subject).to eq(1)
+ expect(subject).to eq('1')
end
it "doesn't find commits from other projects" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
- it "finds a large (> 100) snumber of commits if present" do
+ it "finds a large (> 100) number of commits if present" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
- expect(subject).to eq(100)
+ expect(subject).to eq('100')
end
context 'when `to` parameter is given' do
@@ -81,14 +81,14 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq(2)
+ expect(subject).to eq('2')
end
end
@@ -118,7 +118,7 @@ describe Gitlab::CycleAnalytics::StageSummary do
Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
- expect(subject).to eq(1)
+ expect(subject).to eq('1')
end
it "doesn't find commits from other projects" do
@@ -126,7 +126,7 @@ describe Gitlab::CycleAnalytics::StageSummary do
create(:deployment, :success, project: create(:project, :repository))
end
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
context 'when `to` parameter is given' do
@@ -138,14 +138,76 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq(2)
+ expect(subject).to eq('2')
+ end
+ end
+ end
+
+ describe '#deployment_frequency' do
+ subject { stage_summary.fourth[:value] }
+
+ it 'includes the unit: `per day`' do
+ expect(stage_summary.fourth[:unit]).to eq _('per day')
+ end
+
+ before do
+ Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
+ end
+
+ it 'returns 0 when there were deploys but the frequency was too low' do
+ options[:from] = 30.days.ago
+
+ # 1 deployment over 30 days
+ # frequency of 0.03, rounded off to 0.0
+ expect(subject).to eq('0')
+ end
+
+ it 'returns `-` when there were no deploys' do
+ options[:from] = 4.days.ago
+
+ # 0 deployment in the last 4 days
+ expect(subject).to eq('-')
+ end
+
+ context 'when `to` is nil' do
+ it 'includes range until now' do
+ options[:from] = 6.days.ago
+ options[:to] = nil
+
+ # 1 deployment over 7 days
+ expect(subject).to eq('0.1')
+ end
+ end
+
+ context 'when `to` is given' do
+ before do
+ Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
+ end
+
+ it 'finds records created between `from` and `to` range' do
+ options[:from] = 10.days.ago
+ options[:to] = 10.days.from_now
+
+ # 2 deployments over 20 days
+ expect(subject).to eq('0.1')
+ end
+
+ context 'when `from` and `to` are within a day' do
+ it 'returns the number of deployments made on that day' do
+ Timecop.freeze(Time.now) do
+ create(:deployment, :success, project: project)
+ options[:from] = options[:to] = Time.now
+
+ expect(subject).to eq('1')
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index da22da8de0f..519f5873d75 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -83,7 +83,7 @@ describe Gitlab::DataBuilder::Pipeline do
expect(merge_request_attrs[:target_branch]).to eq(merge_request.target_branch)
expect(merge_request_attrs[:target_project_id]).to eq(merge_request.target_project_id)
expect(merge_request_attrs[:state]).to eq(merge_request.state)
- expect(merge_request_attrs[:merge_status]).to eq(merge_request.merge_status)
+ expect(merge_request_attrs[:merge_status]).to eq(merge_request.public_merge_status)
expect(merge_request_attrs[:url]).to eq("http://localhost/#{merge_request.target_project.full_path}/-/merge_requests/#{merge_request.iid}")
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 3db9320c021..3a0148615b9 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -215,6 +215,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'ON DELETE statements' do
context 'on_delete: :nullify' do
it 'appends ON DELETE SET NULL statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
@@ -230,6 +231,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'on_delete: :cascade' do
it 'appends ON DELETE CASCADE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
@@ -245,6 +247,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'on_delete: nil' do
it 'appends no ON DELETE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
@@ -261,6 +264,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when no custom key name is supplied' do
it 'creates a concurrent foreign key and validates it' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
@@ -287,6 +291,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when a custom key name is supplied' do
context 'for creating a new foreign key for a column that does not presently exist' do
it 'creates a new foreign key' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
@@ -314,6 +319,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when the supplied key name is different from the existing foreign key name' do
it 'creates a new foreign key' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
index 33d4994f5db..e275ebef2c9 100644
--- a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
@@ -9,7 +9,8 @@ describe Gitlab::Diff::Formatters::TextFormatter do
start_sha: 456,
head_sha: 789,
old_path: 'old_path.txt',
- new_path: 'new_path.txt'
+ new_path: 'new_path.txt',
+ line_range: nil
}
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index a16e5e185bb..3c128aad976 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -113,7 +113,7 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
allow(redis).to receive(:info).and_return({ "redis_version" => "3.0.0" })
expect(described_class.gitlab_redis_diff_caching_memory_usage_bytes)
- .not_to receive(:observe).and_call_original
+ .not_to receive(:observe)
cache.send(:write_to_redis_hash, diff_hash)
end
@@ -163,6 +163,56 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
end
end
+ describe "GZip usage" do
+ let(:diff_file) do
+ diffs = merge_request.diffs
+ raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['CHANGELOG'])).first
+ Gitlab::Diff::File.new(raw_diff,
+ repository: diffs.project.repository,
+ diff_refs: diffs.diff_refs,
+ fallback_diff_refs: diffs.fallback_diff_refs)
+ end
+
+ context "feature flag :gzip_diff_cache disabled" do
+ before do
+ stub_feature_flags(gzip_diff_cache: true)
+ end
+
+ it "uses ActiveSupport::Gzip when reading from the cache" do
+ expect(ActiveSupport::Gzip).to receive(:decompress).at_least(:once).and_call_original
+
+ cache.write_if_empty
+ cache.decorate(diff_file)
+ end
+
+ it "uses ActiveSupport::Gzip to compress data when writing to cache" do
+ expect(ActiveSupport::Gzip).to receive(:compress).and_call_original
+
+ cache.send(:write_to_redis_hash, diff_hash)
+ end
+ end
+
+ context "feature flag :gzip_diff_cache disabled" do
+ before do
+ stub_feature_flags(gzip_diff_cache: false)
+ end
+
+ it "doesn't use ActiveSupport::Gzip when reading from the cache" do
+ expect(ActiveSupport::Gzip).not_to receive(:decompress)
+
+ cache.write_if_empty
+ cache.decorate(diff_file)
+ end
+
+ it "doesn't use ActiveSupport::Gzip to compress data when writing to cache" do
+ expect(ActiveSupport::Gzip).not_to receive(:compress)
+
+ expect { cache.send(:write_to_redis_hash, diff_hash) }
+ .to change { Gitlab::Redis::Cache.with { |r| r.hgetall(cache_key) } }
+ end
+ end
+ end
+
describe 'metrics' do
it 'defines :gitlab_redis_diff_caching_memory_usage_bytes histogram' do
expect(described_class).to respond_to(:gitlab_redis_diff_caching_memory_usage_bytes)
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index 4b11ff16c38..a83c0f35d92 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -28,6 +28,7 @@ describe Gitlab::Diff::Position do
new_path: "files/ruby/popen.rb",
old_line: nil,
new_line: 14,
+ line_range: nil,
base_sha: nil,
head_sha: nil,
start_sha: nil,
diff --git a/spec/lib/gitlab/elasticsearch/logs_spec.rb b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
index 6b9d1dbef99..8b6a19fa2c5 100644
--- a/spec/lib/gitlab/elasticsearch/logs_spec.rb
+++ b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Elasticsearch::Logs do
+describe Gitlab::Elasticsearch::Logs::Lines do
let(:client) { Elasticsearch::Transport::Client }
let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
diff --git a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
new file mode 100644
index 00000000000..0a4ab0780c5
--- /dev/null
+++ b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Elasticsearch::Logs::Pods do
+ let(:client) { Elasticsearch::Transport::Client }
+
+ let(:es_query) { JSON.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) }
+ let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/pods_response.json')) }
+ let(:namespace) { "autodevops-deploy-9-production" }
+
+ subject { described_class.new(client) }
+
+ describe '#pods' do
+ it 'returns the pods' do
+ expect(client).to receive(:search).with(body: es_query).and_return(es_response)
+
+ result = subject.pods(namespace)
+ expect(result).to eq([
+ {
+ name: "runner-gitlab-runner-7bbfb5dcb5-p6smb",
+ container_names: %w[runner-gitlab-runner]
+ },
+ {
+ name: "elastic-stack-elasticsearch-master-1",
+ container_names: %w[elasticsearch chown sysctl]
+ },
+ {
+ name: "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
+ container_names: %w[nginx-ingress-controller]
+ }
+ ])
+ end
+ end
+end
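As exercised above, the new Gitlab::Elasticsearch::Logs::Pods service takes an Elasticsearch client and returns, per namespace, the pod names together with their container names. Usage mirroring the spec (client being any Elasticsearch::Transport::Client):

    pods = Gitlab::Elasticsearch::Logs::Pods.new(client).pods('autodevops-deploy-9-production')
    pods.first
    # => { name: "runner-gitlab-runner-7bbfb5dcb5-p6smb", container_names: ["runner-gitlab-runner"] }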
diff --git a/spec/lib/gitlab/file_hook_spec.rb b/spec/lib/gitlab/file_hook_spec.rb
index d184eb483d4..fda3583289b 100644
--- a/spec/lib/gitlab/file_hook_spec.rb
+++ b/spec/lib/gitlab/file_hook_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::FileHook do
- let(:file_hook) { Rails.root.join('plugins', 'test.rb') }
+ let(:file_hook) { Rails.root.join('file_hooks', 'test.rb') }
let(:tmp_file) { Tempfile.new('file_hook-dump') }
let(:file_hook_source) do
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index b03c1feb429..2c6aee58326 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -19,6 +19,15 @@ describe Gitlab::GitalyClient do
})
end
+ describe '.query_time', :request_store do
+ it 'increments query times' do
+ subject.query_time += 0.451
+ subject.query_time += 0.322
+
+ expect(subject.query_time).to eq(0.773)
+ end
+ end
+
describe '.long_timeout' do
context 'default case' do
it { expect(subject.long_timeout).to eq(6.hours) }
diff --git a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
index 6f20b8877e0..09ba4b89a1a 100644
--- a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::GrapeLogging::Loggers::PerfLogger do
payload = subject.parameters(mock_request, nil)
expect(payload[:redis_calls]).to eq(1)
- expect(payload[:redis_duration_ms]).to be >= 0
+ expect(payload[:redis_duration_s]).to be >= 0
end
end
end
diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
index c0762e9892b..17c0659327d 100644
--- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
@@ -25,11 +25,11 @@ describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
)
end
- it 'returns the correct duration in ms' do
+ it 'returns the correct duration in seconds' do
Timecop.freeze(start_time) do
subject.before
- expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration': 1.hour.to_f * 1000 })
+ expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 1.hour.to_f })
end
end
end
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
index fc7e4737d13..3030cdf4cf8 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::ImportExport::Group::TreeRestorer do
+describe Gitlab::ImportExport::Group::LegacyTreeRestorer do
include ImportExport::CommonUtil
let(:shared) { Gitlab::ImportExport::Shared.new(group) }
diff --git a/spec/lib/gitlab/import_export/project/import_task_spec.rb b/spec/lib/gitlab/import_export/project/import_task_spec.rb
index f7b9cbaa095..4f4fcd3ad8a 100644
--- a/spec/lib/gitlab/import_export/project/import_task_spec.rb
+++ b/spec/lib/gitlab/import_export/project/import_task_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-describe Gitlab::ImportExport::Project::ImportTask do
+describe Gitlab::ImportExport::Project::ImportTask, :request_store do
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 80ae9a08257..04e8bd05666 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -6,7 +6,7 @@ def match_mr1_note(content_regex)
MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{content_regex}/)}.first
end
-describe Gitlab::ImportExport::Project::TreeRestorer, quarantine: { flaky: 'https://gitlab.com/gitlab-org/gitlab/-/issues/213793' } do
+describe Gitlab::ImportExport::Project::TreeRestorer do
include ImportExport::CommonUtil
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 55b907fff7c..88d7fdaef36 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -595,6 +595,7 @@ ProjectFeature:
- builds_access_level
- repository_access_level
- pages_access_level
+- metrics_dashboard_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -811,6 +812,7 @@ ContainerExpirationPolicy:
- next_run_at
- project_id
- name_regex
+- name_regex_keep
- cadence
- older_than
- keep_n
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 9788c9f4a3c..858fa044a52 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::InstrumentationHelper do
subject
expect(payload[:gitaly_calls]).to eq(1)
- expect(payload[:gitaly_duration]).to be >= 0
+ expect(payload[:gitaly_duration_s]).to be >= 0
expect(payload[:redis_calls]).to be_nil
expect(payload[:redis_duration_ms]).to be_nil
end
@@ -39,7 +39,7 @@ describe Gitlab::InstrumentationHelper do
subject
expect(payload[:redis_calls]).to eq(1)
- expect(payload[:redis_duration_ms]).to be >= 0
+ expect(payload[:redis_duration_s]).to be >= 0
expect(payload[:gitaly_calls]).to be_nil
expect(payload[:gitaly_duration]).to be_nil
end
@@ -49,12 +49,12 @@ describe Gitlab::InstrumentationHelper do
describe '.queue_duration_for_job' do
where(:enqueued_at, :created_at, :time_now, :expected_duration) do
"2019-06-01T00:00:00.000+0000" | nil | "2019-06-01T02:00:00.000+0000" | 2.hours.to_f
- "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.001
+ "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.0
"2019-06-01T02:00:00.000+0000" | "2019-05-01T02:00:00.000+0000" | "2019-06-01T02:00:01.000+0000" | 1
- nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.001
+ nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.0
nil | nil | "2019-06-01T02:00:00.001+0000" | nil
"2019-06-01T02:00:00.000+0200" | nil | "2019-06-01T02:00:00.000-0200" | 4.hours.to_f
- 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.001832
+ 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.00
1571825569 | nil | "2019-10-23T12:13:16.000+0200" | 27
"invalid_date" | nil | "2019-10-23T12:13:16.000+0200" | nil
"" | nil | "2019-10-23T12:13:16.000+0200" | nil
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
new file mode 100644
index 00000000000..5186ab041da
--- /dev/null
+++ b/spec/lib/gitlab/json_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Json do
+ describe ".parse" do
+ it "parses an object" do
+ expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
+ end
+
+ it "parses an array" do
+ expect(subject.parse('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
+ end
+
+ it "raises an error on a string" do
+ expect { subject.parse('"foo"') }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a true bool" do
+ expect { subject.parse("true") }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a false bool" do
+ expect { subject.parse("false") }.to raise_error(JSON::ParserError)
+ end
+ end
+
+ describe ".parse!" do
+ it "parses an object" do
+ expect(subject.parse!('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
+ end
+
+ it "parses an array" do
+ expect(subject.parse!('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
+ end
+
+ it "raises an error on a string" do
+ expect { subject.parse!('"foo"') }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a true bool" do
+ expect { subject.parse!("true") }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a false bool" do
+ expect { subject.parse!("false") }.to raise_error(JSON::ParserError)
+ end
+ end
+
+ describe ".dump" do
+ it "dumps an object" do
+ expect(subject.dump({ "foo" => "bar" })).to eq('{"foo":"bar"}')
+ end
+
+ it "dumps an array" do
+ expect(subject.dump([{ "foo" => "bar" }])).to eq('[{"foo":"bar"}]')
+ end
+
+ it "dumps a string" do
+ expect(subject.dump("foo")).to eq('"foo"')
+ end
+
+ it "dumps a true bool" do
+ expect(subject.dump(true)).to eq("true")
+ end
+
+ it "dumps a false bool" do
+ expect(subject.dump(false)).to eq("false")
+ end
+ end
+
+ describe ".generate" do
+ it "delegates to the adapter" do
+ args = [{ foo: "bar" }]
+
+ expect(JSON).to receive(:generate).with(*args)
+
+ subject.generate(*args)
+ end
+ end
+
+ describe ".pretty_generate" do
+ it "delegates to the adapter" do
+ args = [{ foo: "bar" }]
+
+ expect(JSON).to receive(:pretty_generate).with(*args)
+
+ subject.pretty_generate(*args)
+ end
+ end
+end
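The new Gitlab::Json spec fixes the wrapper's contract: .parse and .parse! accept JSON objects and arrays but raise JSON::ParserError for bare scalars, .dump serialises any value, and .generate/.pretty_generate delegate to the underlying JSON adapter. In short:

    Gitlab::Json.parse('{ "foo": "bar" }')    # => { "foo" => "bar" }
    Gitlab::Json.parse('"foo"')               # raises JSON::ParserError
    Gitlab::Json.dump(false)                  # => "false"
    Gitlab::Json.pretty_generate(foo: "bar")  # delegates to JSON.pretty_generate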
diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
index c59078449b8..a11a9d08503 100644
--- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
@@ -61,4 +61,56 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
it { is_expected.to eq('install-test-class-name') }
end
+
+ describe '#service_account_resource' do
+ let(:resource) do
+ Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' })
+ end
+
+ subject { base_command.service_account_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a Kubeclient resource for the tiller ServiceAccount' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#cluster_role_binding_resource' do
+ let(:resource) do
+ Kubeclient::Resource.new(
+ metadata: { name: 'tiller-admin' },
+ roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
+ subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }]
+ )
+ end
+
+ subject { base_command.cluster_role_binding_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a Kubeclient resource for the ClusterRoleBinding for tiller' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
index f87ceb45766..13021a08f9f 100644
--- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
@@ -83,56 +83,4 @@ describe Gitlab::Kubernetes::Helm::InitCommand do
end
end
end
-
- describe '#service_account_resource' do
- let(:resource) do
- Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' })
- end
-
- subject { init_command.service_account_resource }
-
- context 'rbac is enabled' do
- let(:rbac) { true }
-
- it 'generates a Kubeclient resource for the tiller ServiceAccount' do
- is_expected.to eq(resource)
- end
- end
-
- context 'rbac is not enabled' do
- let(:rbac) { false }
-
- it 'generates nothing' do
- is_expected.to be_nil
- end
- end
- end
-
- describe '#cluster_role_binding_resource' do
- let(:resource) do
- Kubeclient::Resource.new(
- metadata: { name: 'tiller-admin' },
- roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
- subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }]
- )
- end
-
- subject { init_command.cluster_role_binding_resource }
-
- context 'rbac is enabled' do
- let(:rbac) { true }
-
- it 'generates a Kubeclient resource for the ClusterRoleBinding for tiller' do
- is_expected.to eq(resource)
- end
- end
-
- context 'rbac is not enabled' do
- let(:rbac) { false }
-
- it 'generates nothing' do
- is_expected.to be_nil
- end
- end
- end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
index f94ceae362a..a5ed8f57bf3 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
@@ -305,20 +305,4 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
is_expected.to eq(resource)
end
end
-
- describe '#service_account_resource' do
- subject { install_command.service_account_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
-
- describe '#cluster_role_binding_resource' do
- subject { install_command.cluster_role_binding_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
index 064efebdb96..e69570f5371 100644
--- a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
@@ -199,20 +199,4 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do
is_expected.to eq(resource)
end
end
-
- describe '#service_account_resource' do
- subject { patch_command.service_account_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
-
- describe '#cluster_role_binding_resource' do
- subject { patch_command.cluster_role_binding_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index ddc41e64147..aa18a1a843c 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -19,6 +19,7 @@ describe Gitlab::ProjectTemplate do
described_class.new('plainhtml', 'Pages/Plain HTML', 'Everything you need to get started using a plain HTML Pages site.', 'https://gitlab.com/pages/plain-html'),
described_class.new('gitbook', 'Pages/GitBook', 'Everything you need to get started using a GitBook Pages site.', 'https://gitlab.com/pages/gitbook'),
described_class.new('hexo', 'Pages/Hexo', 'Everything you need to get started using a Hexo Pages site.', 'https://gitlab.com/pages/hexo'),
+ described_class.new('sse_middleman', 'Static Site Editor/Middleman', _('Middleman project with Static Site Editor support'), 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman'),
described_class.new('nfhugo', 'Netlify/Hugo', _('A Hugo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhugo'),
described_class.new('nfjekyll', 'Netlify/Jekyll', _('A Jekyll site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfjekyll'),
described_class.new('nfplainhtml', 'Netlify/Plain HTML', _('A plain HTML site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfplain-html'),
diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 202bf65f92b..afee95467fa 100644
--- a/spec/lib/gitlab/prometheus/adapter_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -19,6 +19,14 @@ describe Gitlab::Prometheus::Adapter do
it 'return prometheus service as prometheus adapter' do
expect(subject.prometheus_adapter).to eq(prometheus_service)
end
+
+ context 'with cluster with prometheus available' do
+ let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+
+ it 'returns prometheus service' do
+ expect(subject.prometheus_adapter).to eq(prometheus_service)
+ end
+ end
end
context "prometheus service can't execute queries" do
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index db7c5f771b7..f4b939c3013 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -42,11 +42,10 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
start_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
'job_status' => 'done',
- 'duration' => 0.0,
+ 'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
- 'cpu_s' => 1.111112,
- 'db_duration' => 0,
- 'db_duration_s' => 0
+ 'cpu_s' => 1.11,
+ 'db_duration_s' => 0.0
)
end
let(:exception_payload) do
@@ -160,11 +159,11 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:timing_data) do
{
gitaly_calls: 10,
- gitaly_duration: 10000,
+ gitaly_duration_s: 10000,
rugged_calls: 1,
- rugged_duration_ms: 5000,
+ rugged_duration_s: 5000,
redis_calls: 3,
- redis_duration_ms: 1234
+ redis_duration_s: 1234
}
end
@@ -193,12 +192,11 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:expected_start_payload) { start_payload.except('args') }
let(:expected_end_payload) do
- end_payload.except('args').merge('cpu_s' => a_value > 0)
+ end_payload.except('args').merge('cpu_s' => a_value >= 0)
end
let(:expected_end_payload_with_db) do
expected_end_payload.merge(
- 'db_duration' => a_value >= 100,
'db_duration_s' => a_value >= 0.1
)
end
@@ -226,7 +224,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:time) { { duration: 0.1231234, cputime: 1.2342345 } }
let(:payload) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status' } }
let(:current_utc_time) { Time.now.utc }
- let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time.to_f } }
+ let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration_s' => 0.12, 'cpu_s' => 1.23, 'completed_at' => current_utc_time.to_f } }
subject { described_class.new }
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
index 0ea248fbcf1..312ebd30a76 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
@@ -21,18 +21,9 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_redis_q
end
around do |example|
- Sidekiq::Testing.inline! { example.run }
- end
-
- before(:context) do
- Sidekiq::Testing.server_middleware do |chain|
+ with_sidekiq_server_middleware do |chain|
chain.add described_class
- end
- end
-
- after(:context) do
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove described_class
+ Sidekiq::Testing.inline! { example.run }
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
index f64ebece930..fdf643a8ad1 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
@@ -41,18 +41,9 @@ describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
end
around do |example|
- Sidekiq::Testing.inline! { example.run }
- end
-
- before(:context) do
- Sidekiq::Testing.server_middleware do |chain|
+ with_sidekiq_server_middleware do |chain|
chain.add described_class
- end
- end
-
- after(:context) do
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove described_class
+ Sidekiq::Testing.inline! { example.run }
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 32c1807ba6e..752ec6a0a3f 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -28,11 +28,16 @@ describe Gitlab::SidekiqMiddleware do
# 2) yielding exactly once
describe '.server_configurator' do
around do |example|
- original = Sidekiq::Testing.server_middleware.dup
-
- example.run
-
- Sidekiq::Testing.instance_variable_set :@server_chain, original
+ with_sidekiq_server_middleware do |chain|
+ described_class.server_configurator(
+ metrics: metrics,
+ arguments_logger: arguments_logger,
+ memory_killer: memory_killer,
+ request_store: request_store
+ ).call(chain)
+
+ example.run
+ end
end
let(:middleware_expected_args) { [a_kind_of(worker_class), hash_including({ 'args' => job_args }), anything] }
@@ -54,21 +59,17 @@ describe Gitlab::SidekiqMiddleware do
end
let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares }
- before do
- Sidekiq::Testing.server_middleware.clear
- Sidekiq::Testing.server_middleware(&described_class.server_configurator(
- metrics: metrics,
- arguments_logger: arguments_logger,
- memory_killer: memory_killer,
- request_store: request_store
- ))
-
- enabled_sidekiq_middlewares.each do |middleware|
- expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
- end
+ shared_examples "a server middleware chain" do
+ it "passes through the right server middlewares" do
+ enabled_sidekiq_middlewares.each do |middleware|
+ expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
+ end
- disabled_sidekiq_middlewares.each do |middleware|
- expect_any_instance_of(Gitlab::SidekiqMiddleware::ArgumentsLogger).not_to receive(:call)
+ disabled_sidekiq_middlewares.each do |middleware|
+ expect_any_instance_of(middleware).not_to receive(:call)
+ end
+
+ worker_class.perform_async(*job_args)
end
end
@@ -86,9 +87,7 @@ describe Gitlab::SidekiqMiddleware do
]
end
- it "passes through server middlewares" do
- worker_class.perform_async(*job_args)
- end
+ it_behaves_like "a server middleware chain"
end
context "all optional middlewares on" do
@@ -98,9 +97,7 @@ describe Gitlab::SidekiqMiddleware do
let(:request_store) { true }
let(:disabled_sidekiq_middlewares) { [] }
- it "passes through server middlewares" do
- worker_class.perform_async(*job_args)
- end
+ it_behaves_like "a server middleware chain"
context "server metrics" do
let(:gitaly_histogram) { double(:gitaly_histogram) }
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb
index 56d6bf1c788..47b9a67f54f 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
describe Gitlab::SlashCommands::Presenters::IssueShow do
- let(:project) { create(:project) }
+ let(:user) { create(:user, :with_avatar) }
+ let(:project) { create(:project, creator: user) }
let(:issue) { create(:issue, project: project) }
let(:attachment) { subject[:attachments].first }
@@ -15,6 +16,7 @@ describe Gitlab::SlashCommands::Presenters::IssueShow do
expect(subject[:response_type]).to be(:in_channel)
expect(subject).to have_key(:attachments)
expect(attachment[:title]).to start_with(issue.title)
+ expect(attachment[:author_icon]).to eq(user.avatar_url(only_path: false))
end
context 'with upvotes' do
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index d3780d22241..e34367cbbf9 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Utils do
delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which,
:ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :check_path_traversal!, to: :described_class
+ :append_path, :check_path_traversal!, :ms_to_round_sec, to: :described_class
describe '.check_path_traversal!' do
it 'detects path traversal at the start of the string' do
@@ -55,6 +55,22 @@ describe Gitlab::Utils do
end
end
+ describe '.ms_to_round_sec' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:original, :expected) do
+ 1999.8999 | 2
+ 12384 | 12.38
+ 333 | 0.33
+ end
+
+ with_them do
+ it "returns rounded seconds" do
+ expect(ms_to_round_sec(original)).to eq(expected)
+ end
+ end
+ end
+
describe '.to_exclusive_sentence' do
it 'calls #to_sentence on the array' do
array = double
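Gitlab::Utils.ms_to_round_sec, added alongside the new *_duration_s payload keys elsewhere in this diff, converts milliseconds into seconds rounded to two decimal places. Judging by the expectation table, it behaves like the following sketch (an inference from the table, not the actual implementation):

    (1999.8999 / 1000.0).round(2)  # => 2.0
    (12_384 / 1000.0).round(2)     # => 12.38
    (333 / 1000.0).round(2)        # => 0.33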
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index d4b84c5cdc4..2f446694083 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -24,20 +24,6 @@ describe 'Marginalia spec' do
end
end
- def add_sidekiq_middleware
- # Reference: https://github.com/mperham/sidekiq/wiki/Testing#testing-server-middlewaresidekiq
- # Sidekiq test harness fakes worker without its server middlewares, so include instrumentation to 'Sidekiq::Testing' server middleware.
- Sidekiq::Testing.server_middleware do |chain|
- chain.add Marginalia::SidekiqInstrumentation::Middleware
- end
- end
-
- def remove_sidekiq_middleware
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove Marginalia::SidekiqInstrumentation::Middleware
- end
- end
-
def stub_feature(value)
allow(Gitlab::Marginalia).to receive(:cached_feature_enabled?).and_return(value)
end
@@ -88,20 +74,16 @@ describe 'Marginalia spec' do
end
describe 'for Sidekiq worker jobs' do
- before(:all) do
- add_sidekiq_middleware
-
- # Because of faking, 'Sidekiq.server?' does not work so implicitly set application name which is done in config/initializers/0_marginalia.rb
- Marginalia.application_name = "sidekiq"
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add Marginalia::SidekiqInstrumentation::Middleware
+ Marginalia.application_name = "sidekiq"
+ example.run
+ end
end
after(:all) do
MarginaliaTestJob.clear
- remove_sidekiq_middleware
- end
-
- around do |example|
- Sidekiq::Testing.fake! { example.run }
end
before do
diff --git a/spec/mailers/emails/issues_spec.rb b/spec/mailers/emails/issues_spec.rb
index 5b5bd6f4308..dfd974aa5f3 100644
--- a/spec/mailers/emails/issues_spec.rb
+++ b/spec/mailers/emails/issues_spec.rb
@@ -6,6 +6,12 @@ require 'email_spec'
describe Emails::Issues do
include EmailSpec::Matchers
+ it 'adds email methods to Notify' do
+ subject.instance_methods.each do |email_method|
+ expect(Notify).to be_respond_to(email_method)
+ end
+ end
+
describe "#import_issues_csv_email" do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -39,4 +45,47 @@ describe Emails::Issues do
it_behaves_like 'appearance header and footer not enabled'
end
end
+
+ describe '#issues_csv_email' do
+ let(:user) { create(:user) }
+ let(:empty_project) { create(:project, path: 'myproject') }
+ let(:export_status) { { truncated: false, rows_expected: 3, rows_written: 3 } }
+ let(:attachment) { subject.attachments.first }
+
+ subject { Notify.issues_csv_email(user, empty_project, "dummy content", export_status) }
+
+ include_context 'gitlab email notification'
+
+ it 'attachment has csv mime type' do
+ expect(attachment.mime_type).to eq 'text/csv'
+ end
+
+ it 'generates a useful filename' do
+ expect(attachment.filename).to include(Date.today.year.to_s)
+ expect(attachment.filename).to include('issues')
+ expect(attachment.filename).to include('myproject')
+ expect(attachment.filename).to end_with('.csv')
+ end
+
+ it 'mentions number of issues and project name' do
+ expect(subject).to have_content '3'
+ expect(subject).to have_content empty_project.name
+ end
+
+ it "doesn't need to mention truncation by default" do
+ expect(subject).not_to have_content 'truncated'
+ end
+
+ context 'when truncated' do
+ let(:export_status) { { truncated: true, rows_expected: 12, rows_written: 10 } }
+
+ it 'mentions that the csv has been truncated' do
+ expect(subject).to have_content 'truncated'
+ end
+
+ it 'mentions the number of issues written and expected' do
+ expect(subject).to have_content '10 of 12 issues'
+ end
+ end
+ end
end
diff --git a/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb b/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
index 7e6afbec520..529fe046e32 100644
--- a/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
+++ b/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
@@ -14,7 +14,7 @@ describe CleanupEmptyCommitUserMentions, :migration, :sidekiq do
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
diff --git a/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb b/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
index aa78381ba3a..dc40d0865f2 100644
--- a/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
+++ b/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
@@ -14,7 +14,7 @@ describe MigrateCommitNotesMentionsToDb, :migration, :sidekiq do
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 31e13122b95..34f89d9cdae 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -17,8 +17,6 @@ describe Ci::Bridge do
{ trigger: { project: 'my/project', branch: 'master' } }
end
- it { is_expected.to include_module(Ci::PipelineDelegator) }
-
it 'has many sourced pipelines' do
expect(bridge).to have_many(:sourced_pipelines)
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 673b9e5f076..bdaecea2089 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -37,8 +37,6 @@ describe Ci::Build do
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
- it { is_expected.to include_module(Ci::PipelineDelegator) }
-
describe 'associations' do
it 'has a bidirectional relationship with projects' do
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
@@ -1818,64 +1816,65 @@ describe Ci::Build do
end
describe '#merge_request' do
- def create_mr(build, pipeline, factory: :merge_request, created_at: Time.now)
- create(factory, source_project: pipeline.project,
- target_project: pipeline.project,
- source_branch: build.ref,
- created_at: created_at)
- end
+ subject { pipeline.builds.take.merge_request }
- context 'when a MR has a reference to the pipeline' do
- before do
- @merge_request = create_mr(build, pipeline, factory: :merge_request)
+ context 'on a branch pipeline' do
+ let!(:pipeline) { create(:ci_pipeline, :with_job, project: project, ref: 'fix') }
- commits = [double(id: pipeline.sha)]
- allow(@merge_request).to receive(:commits).and_return(commits)
- allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request])
+ context 'with no merge request' do
+ it { is_expected.to be_nil }
end
- it 'returns the single associated MR' do
- expect(build.merge_request.id).to eq(@merge_request.id)
- end
- end
+ context 'with an open merge request from the same ref name' do
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'fix') }
- context 'when there is not a MR referencing the pipeline' do
- it 'returns nil' do
- expect(build.merge_request).to be_nil
- end
- end
+ # If no diff exists, the pipeline commit was not part of the merge
+ # request and may have simply incidentally used the same ref name.
+ context 'without a merge request diff containing the pipeline commit' do
+ it { is_expected.to be_nil }
+ end
- context 'when more than one MR have a reference to the pipeline' do
- before do
- @merge_request = create_mr(build, pipeline, factory: :merge_request)
- @merge_request.close!
- @merge_request2 = create_mr(build, pipeline, factory: :merge_request)
+ # If the merge request was truly opened from the branch that the
+ # pipeline ran on, that head sha will be present in a diff.
+ context 'with a merge request diff containing the pipeline commit' do
+ let!(:mr_diff) { create(:merge_request_diff, merge_request: merge_request) }
+ let!(:mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: mr_diff) }
- commits = [double(id: pipeline.sha)]
- allow(@merge_request).to receive(:commits).and_return(commits)
- allow(@merge_request2).to receive(:commits).and_return(commits)
- allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request, @merge_request2])
+ it { is_expected.to eq(merge_request) }
+ end
end
- it 'returns the first MR' do
- expect(build.merge_request.id).to eq(@merge_request.id)
+ context 'with multiple open merge requests' do
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'fix') }
+ let!(:mr_diff) { create(:merge_request_diff, merge_request: merge_request) }
+ let!(:mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: mr_diff) }
+
+ let!(:new_merge_request) { create(:merge_request, source_project: project, source_branch: 'fix', target_branch: 'staging') }
+ let!(:new_mr_diff) { create(:merge_request_diff, merge_request: new_merge_request) }
+ let!(:new_mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: new_mr_diff) }
+
+ it 'returns the first merge request' do
+ expect(subject).to eq(merge_request)
+ end
end
end
- context 'when a Build is created after the MR' do
- before do
- @merge_request = create_mr(build, pipeline, factory: :merge_request_with_diffs)
- pipeline2 = create(:ci_pipeline, project: project)
- @build2 = create(:ci_build, pipeline: pipeline2)
+ context 'on a detached merge request pipeline' do
+ let(:pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline, :with_job) }
- allow(@merge_request).to receive(:commit_shas)
- .and_return([pipeline.sha, pipeline2.sha])
- allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request])
- end
+ it { is_expected.to eq(pipeline.merge_request) }
+ end
- it 'returns the current MR' do
- expect(@build2.merge_request.id).to eq(@merge_request.id)
- end
+ context 'on a legacy detached merge request pipeline' do
+ let(:pipeline) { create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job) }
+
+ it { is_expected.to eq(pipeline.merge_request) }
+ end
+
+ context 'on a pipeline for merged results' do
+ let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_job) }
+
+ it { is_expected.to eq(pipeline.merge_request) }
end
end
@@ -2281,6 +2280,7 @@ describe Ci::Build do
{ key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true, masked: false },
{ key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false, masked: true },
{ key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false, masked: false },
+ { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true },
{ key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
{ key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
{ key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false },
@@ -2333,23 +2333,36 @@ describe Ci::Build do
end
before do
+ allow(Gitlab::Ci::Jwt).to receive(:for_build).with(build).and_return('ci.job.jwt')
build.set_token('my-token')
build.yaml_variables = []
end
it { is_expected.to eq(predefined_variables) }
+ context 'when ci_job_jwt feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_job_jwt: false)
+ end
+
+ it 'does not include CI_JOB_JWT' do
+ expect(subject.pluck(:key)).not_to include('CI_JOB_JWT')
+ end
+ end
+
describe 'variables ordering' do
context 'when variables hierarchy is stubbed' do
let(:build_pre_var) { { key: 'build', value: 'value', public: true, masked: false } }
let(:project_pre_var) { { key: 'project', value: 'value', public: true, masked: false } }
let(:pipeline_pre_var) { { key: 'pipeline', value: 'value', public: true, masked: false } }
let(:build_yaml_var) { { key: 'yaml', value: 'value', public: true, masked: false } }
+ let(:job_jwt_var) { { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true } }
before do
allow(build).to receive(:predefined_variables) { [build_pre_var] }
allow(build).to receive(:yaml_variables) { [build_yaml_var] }
allow(build).to receive(:persisted_variables) { [] }
+ allow(build).to receive(:job_jwt_variables) { [job_jwt_var] }
allow_any_instance_of(Project)
.to receive(:predefined_variables) { [project_pre_var] }
@@ -2362,7 +2375,8 @@ describe Ci::Build do
it 'returns variables in order depending on resource hierarchy' do
is_expected.to eq(
- [build_pre_var,
+ [job_jwt_var,
+ build_pre_var,
project_pre_var,
pipeline_pre_var,
build_yaml_var,
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 80b619ed2b1..6f6ff3704b4 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -349,13 +349,16 @@ describe Ci::JobArtifact do
end
describe 'file is being stored' do
- context 'when object has nil store' do
- it 'is stored locally' do
- subject = build(:ci_job_artifact, :archive, file_store: nil)
+ subject { create(:ci_job_artifact, :archive) }
- subject.save
+ context 'when object has nil store' do
+ before do
+ subject.update_column(:file_store, nil)
+ subject.reload
+ end
- expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
+ it 'is stored locally' do
+ expect(subject.file_store).to be(nil)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
end
@@ -363,10 +366,6 @@ describe Ci::JobArtifact do
context 'when existing object has local store' do
it 'is stored locally' do
- subject = build(:ci_job_artifact, :archive)
-
- subject.save
-
expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
@@ -380,10 +379,6 @@ describe Ci::JobArtifact do
context 'when file is stored' do
it 'is stored remotely' do
- subject = build(:ci_job_artifact, :archive)
-
- subject.save
-
expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(subject.file).not_to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE)
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index e8ef7b29681..4490371bde5 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -6,6 +6,18 @@ describe Ci::Processable do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:detached_merge_request_pipeline) do
+ create(:ci_pipeline, :detached_merge_request_pipeline, :with_job, project: project)
+ end
+
+ let_it_be(:legacy_detached_merge_request_pipeline) do
+ create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job, project: project)
+ end
+
+ let_it_be(:merged_result_pipeline) do
+ create(:ci_pipeline, :merged_result_pipeline, :with_job, project: project)
+ end
+
describe '#aggregated_needs_names' do
let(:with_aggregated_needs) { pipeline.processables.select_with_aggregated_needs(project) }
@@ -155,4 +167,70 @@ describe Ci::Processable do
end
end
end
+
+ describe '#merge_request?' do
+ subject { pipeline.processables.first.merge_request? }
+
+ context 'in a detached merge request pipeline' do
+ let(:pipeline) { detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request?) }
+ end
+
+ context 'in a legacy detached merge request pipeline' do
+ let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request?) }
+ end
+
+ context 'in a pipeline for merged results' do
+ let(:pipeline) { merged_result_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request?) }
+ end
+ end
+
+ describe '#merge_request_ref?' do
+ subject { pipeline.processables.first.merge_request_ref? }
+
+ context 'in a detached merge request pipeline' do
+ let(:pipeline) { detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request_ref?) }
+ end
+
+ context 'in a legacy detached merge request pipeline' do
+ let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request_ref?) }
+ end
+
+ context 'in a pipeline for merged results' do
+ let(:pipeline) { merged_result_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request_ref?) }
+ end
+ end
+
+ describe '#legacy_detached_merge_request_pipeline?' do
+ subject { pipeline.processables.first.legacy_detached_merge_request_pipeline? }
+
+ context 'in a detached merge request pipeline' do
+ let(:pipeline) { detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ end
+
+ context 'in a legacy detached merge request pipeline' do
+ let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ end
+
+ context 'in a pipeline for merged results' do
+ let(:pipeline) { merged_result_pipeline }
+
+ it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ end
+ end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index b8034ba5bf2..2dedff7f15b 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -78,6 +78,36 @@ describe Ci::Runner do
.to raise_error(ActiveRecord::RecordInvalid)
end
end
+
+ context 'cost factors validations' do
+ it 'disallows :private_projects_minutes_cost_factor being nil' do
+ runner = build(:ci_runner, private_projects_minutes_cost_factor: nil)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Private projects minutes cost factor needs to be non-negative')
+ end
+
+ it 'disallows :public_projects_minutes_cost_factor being nil' do
+ runner = build(:ci_runner, public_projects_minutes_cost_factor: nil)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Public projects minutes cost factor needs to be non-negative')
+ end
+
+ it 'disallows :private_projects_minutes_cost_factor being negative' do
+ runner = build(:ci_runner, private_projects_minutes_cost_factor: -1.1)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Private projects minutes cost factor needs to be non-negative')
+ end
+
+ it 'disallows :public_projects_minutes_cost_factor being negative' do
+ runner = build(:ci_runner, public_projects_minutes_cost_factor: -2.2)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Public projects minutes cost factor needs to be non-negative')
+ end
+ end
end
describe 'constraints' do
diff --git a/spec/models/clusters/applications/fluentd_spec.rb b/spec/models/clusters/applications/fluentd_spec.rb
new file mode 100644
index 00000000000..7e9680b0ab4
--- /dev/null
+++ b/spec/models/clusters/applications/fluentd_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Applications::Fluentd do
+ let(:fluentd) { create(:clusters_applications_fluentd) }
+
+ include_examples 'cluster application core specs', :clusters_applications_fluentd
+ include_examples 'cluster application status specs', :clusters_applications_fluentd
+ include_examples 'cluster application version specs', :clusters_applications_fluentd
+ include_examples 'cluster application initial status specs'
+
+ describe '#can_uninstall?' do
+ subject { fluentd.can_uninstall? }
+
+ it { is_expected.to be true }
+ end
+
+ describe '#install_command' do
+ subject { fluentd.install_command }
+
+ it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand) }
+
+ it 'is initialized with fluentd arguments' do
+ expect(subject.name).to eq('fluentd')
+ expect(subject.chart).to eq('stable/fluentd')
+ expect(subject.version).to eq('2.4.0')
+ expect(subject).to be_rbac
+ end
+
+ context 'application failed to install previously' do
+ let(:fluentd) { create(:clusters_applications_fluentd, :errored, version: '0.0.1') }
+
+ it 'is initialized with the locked version' do
+ expect(subject.version).to eq('2.4.0')
+ end
+ end
+ end
+
+ describe '#files' do
+ let(:application) { fluentd }
+ let(:values) { subject[:'values.yaml'] }
+
+ subject { application.files }
+
+ it 'includes fluentd specific keys in the values.yaml file' do
+ expect(values).to include('output.conf', 'general.conf')
+ end
+ end
+end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index 64d667f40f6..b070729ccac 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -219,6 +219,12 @@ describe Clusters::Applications::Ingress do
expect(subject.values).to include('extraContainers')
end
+
+ it 'includes livenessProbe for modsecurity sidecar container' do
+ probe_config = YAML.safe_load(subject.values).dig('controller', 'extraContainers', 0, 'livenessProbe')
+
+ expect(probe_config).to eq('exec' => { 'command' => ['ls', '/var/log/modsec/audit.log'] })
+ end
end
context 'when modsecurity_enabled is disabled' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 29c75186110..db1d8672d1e 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -582,9 +582,10 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
let!(:jupyter) { create(:clusters_applications_jupyter, cluster: cluster) }
let!(:knative) { create(:clusters_applications_knative, cluster: cluster) }
let!(:elastic_stack) { create(:clusters_applications_elastic_stack, cluster: cluster) }
+ let!(:fluentd) { create(:clusters_applications_fluentd, cluster: cluster) }
it 'returns a list of created applications' do
- is_expected.to contain_exactly(helm, ingress, cert_manager, crossplane, prometheus, runner, jupyter, knative, elastic_stack)
+ is_expected.to contain_exactly(helm, ingress, cert_manager, crossplane, prometheus, runner, jupyter, knative, elastic_stack, fluentd)
end
end
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index cc1bb164c16..24908785320 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -496,6 +496,40 @@ describe Issuable do
end
end
+ describe '.labels_hash' do
+ let(:feature_label) { create(:label, title: 'Feature') }
+ let(:second_label) { create(:label, title: 'Second Label') }
+ let!(:issues) { create_list(:labeled_issue, 3, labels: [feature_label, second_label]) }
+ let(:issue_id) { issues.first.id }
+
+ it 'maps issue ids to label titles' do
+ expect(Issue.labels_hash[issue_id]).to include('Feature')
+ end
+
+ it 'works on relations filtered by multiple labels' do
+ relation = Issue.with_label(['Feature', 'Second Label'])
+
+ expect(relation.labels_hash[issue_id]).to include('Feature', 'Second Label')
+ end
+
+ # This tests the workaround for the lack of a NOT NULL constraint in
+ # label_links.label_id:
+ # https://gitlab.com/gitlab-org/gitlab/issues/197307
+ context 'with a NULL label ID in the link' do
+ let(:issue) { create(:labeled_issue, labels: [feature_label, second_label]) }
+
+ before do
+ label_link = issue.label_links.find_by(label_id: second_label.id)
+ label_link.label_id = nil
+ label_link.save(validate: false)
+ end
+
+ it 'filters out bad labels' do
+ expect(Issue.where(id: issue.id).labels_hash[issue.id]).to match_array(['Feature'])
+ end
+ end
+ end
+
describe '#user_notes_count' do
let(:project) { create(:project) }
let(:issue1) { create(:issue, project: project) }
diff --git a/spec/models/cycle_analytics/group_level_spec.rb b/spec/models/cycle_analytics/group_level_spec.rb
index 1f410a7c539..ac169ebc0cf 100644
--- a/spec/models/cycle_analytics/group_level_spec.rb
+++ b/spec/models/cycle_analytics/group_level_spec.rb
@@ -38,7 +38,7 @@ describe CycleAnalytics::GroupLevel do
end
it 'returns medians for each stage for a specific group' do
- expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly(1, 1)
+ expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly('0.1', '1', '1')
end
end
end
diff --git a/spec/models/diff_note_position_spec.rb b/spec/models/diff_note_position_spec.rb
index dedb8a8da4d..d93e0af5526 100644
--- a/spec/models/diff_note_position_spec.rb
+++ b/spec/models/diff_note_position_spec.rb
@@ -40,4 +40,11 @@ describe DiffNotePosition, type: :model do
expect { diff_note_position.save! }.to raise_error(ActiveRecord::RecordNotUnique)
end
+
+ it 'accepts a line_range attribute' do
+ diff_note_position = build(:diff_note_position)
+
+ expect(diff_note_position).to respond_to(:line_range)
+ expect(diff_note_position).to respond_to(:line_range=)
+ end
end
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
index d6574791a65..d286a4ad314 100644
--- a/spec/models/import_failure_spec.rb
+++ b/spec/models/import_failure_spec.rb
@@ -3,7 +3,28 @@
require 'spec_helper'
describe ImportFailure do
- describe "Associations" do
+ describe 'Scopes' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:correlation_id) { 'ABC' }
+ let_it_be(:hard_failure) { create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id) }
+ let_it_be(:soft_failure) { create(:import_failure, :soft_failure, project: project, correlation_id_value: correlation_id) }
+ let_it_be(:unrelated_failure) { create(:import_failure, project: project) }
+
+ it 'returns hard failures given a correlation ID' do
+ expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure])
+ end
+
+ it 'orders hard failures by newest first' do
+ older_failure = hard_failure.dup
+ Timecop.freeze(1.day.before(hard_failure.created_at)) do
+ older_failure.save!
+
+ expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure, older_failure])
+ end
+ end
+ end
+
+ describe 'Associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:group) }
end
diff --git a/spec/models/jira_import_state_spec.rb b/spec/models/jira_import_state_spec.rb
index f75a17f71b2..4d91bf25b5e 100644
--- a/spec/models/jira_import_state_spec.rb
+++ b/spec/models/jira_import_state_spec.rb
@@ -130,8 +130,10 @@ describe JiraImportState do
context 'after transition to finished' do
let!(:jira_import) { build(:jira_import_state, :started, jid: 'some-other-jid', project: project)}
+ subject { jira_import.finish }
+
it 'triggers the import job' do
- jira_import.finish
+ subject
expect(jira_import.jid).to be_nil
end
@@ -139,11 +141,25 @@ describe JiraImportState do
it 'triggers the import job' do
jira_import.update!(status: :scheduled)
- jira_import.finish
+ subject
expect(jira_import.status).to eq('scheduled')
expect(jira_import.jid).to eq('some-other-jid')
end
+
+ it 'updates the record with imported issue counts' do
+ import_label = create(:label, project: project, title: 'jira-import')
+ create_list(:labeled_issue, 3, project: project, labels: [import_label])
+
+ expect(Gitlab::JiraImport).to receive(:get_import_label_id).and_return(import_label.id)
+ expect(Gitlab::JiraImport).to receive(:issue_failures).and_return(2)
+
+ subject
+
+ expect(jira_import.total_issue_count).to eq(5)
+ expect(jira_import.failed_to_import_count).to eq(2)
+ expect(jira_import.imported_issues_count).to eq(3)
+ end
end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 6d2ad3f0475..016af4f269b 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -566,6 +566,45 @@ describe MergeRequestDiff do
it 'returns affected file paths' do
expect(subject.modified_paths).to eq(%w{foo bar baz})
end
+
+ context "when fallback_on_overflow is true" do
+ let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
+ let(:diff) { merge_request.merge_request_diff }
+
+ # before do
+ # # Temporarily unstub diff.modified_paths in favor of original code
+ # #
+ # allow(diff).to receive(:modified_paths).and_call_original
+ # end
+
+ context "when the merge_request_diff is overflowed" do
+ before do
+ expect(diff).to receive(:overflow?).and_return(true)
+ end
+
+ it "returns file paths via project.repository#diff_stats" do
+ expect(diff.project.repository).to receive(:diff_stats).and_call_original
+
+ expect(
+ diff.modified_paths(fallback_on_overflow: true)
+ ).to eq(diff.modified_paths)
+ end
+ end
+
+ context "when the merge_request_diff is not overflowed" do
+ before do
+ expect(subject).to receive(:overflow?).and_return(false)
+ end
+
+ it "returns expect file paths withoout called #modified_paths_for_overflowed_mr" do
+ expect(subject.project.repository).not_to receive(:diff_stats)
+
+ expect(
+ subject.modified_paths(fallback_on_overflow: true)
+ ).to eq(subject.modified_paths)
+ end
+ end
+ end
end
describe '#opening_external_diff' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 50bb194ef71..52cd31ee65f 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2335,6 +2335,21 @@ describe MergeRequest do
end
end
+ describe "#public_merge_status" do
+ using RSpec::Parameterized::TableSyntax
+ subject { build(:merge_request, merge_status: status) }
+
+ where(:status, :public_status) do
+ 'cannot_be_merged_rechecking' | 'checking'
+ 'checking' | 'checking'
+ 'cannot_be_merged' | 'cannot_be_merged'
+ end
+
+ with_them do
+ it { expect(subject.public_merge_status).to eq(public_status) }
+ end
+ end
+
describe "#head_pipeline_active? " do
it do
is_expected
@@ -3226,20 +3241,51 @@ describe MergeRequest do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
- subject.mark_as_unmergeable
- subject.mark_as_unchecked
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_unmergeable!
+ end
+
+ it 'notifies conflict, but does not notify again if rechecking still results in cannot_be_merged with async mergeability check' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
+
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
end
it 'notifies conflict, whenever newly unmergeable' do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
- subject.mark_as_unmergeable
- subject.mark_as_unchecked
- subject.mark_as_mergeable
- subject.mark_as_unchecked
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_mergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_unmergeable!
+ end
+
+ it 'notifies conflict, whenever newly unmergeable with async mergeability check' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
+
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_mergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
end
it 'does not notify whenever merge request is newly unmergeable due to other reasons' do
@@ -3248,7 +3294,7 @@ describe MergeRequest do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
end
end
end
@@ -3261,7 +3307,7 @@ describe MergeRequest do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
end
end
end
diff --git a/spec/models/metrics/dashboard/annotation_spec.rb b/spec/models/metrics/dashboard/annotation_spec.rb
index ed3bef37a7c..f7fd7ded7e6 100644
--- a/spec/models/metrics/dashboard/annotation_spec.rb
+++ b/spec/models/metrics/dashboard/annotation_spec.rb
@@ -50,4 +50,30 @@ describe Metrics::Dashboard::Annotation do
end
end
end
+
+ describe 'scopes' do
+ let_it_be(:nine_minutes_old_annotation) { create(:metrics_dashboard_annotation, starting_at: 9.minutes.ago) }
+ let_it_be(:fifteen_minutes_old_annotation) { create(:metrics_dashboard_annotation, starting_at: 15.minutes.ago) }
+ let_it_be(:just_created_annotation) { create(:metrics_dashboard_annotation) }
+
+ describe '#after' do
+ it 'returns only younger annotations' do
+ expect(described_class.after(12.minutes.ago)).to match_array [nine_minutes_old_annotation, just_created_annotation]
+ end
+ end
+
+ describe '#before' do
+ it 'returns only older annotations' do
+ expect(described_class.before(5.minutes.ago)).to match_array [fifteen_minutes_old_annotation, nine_minutes_old_annotation]
+ end
+ end
+
+ describe '#for_dashboard' do
+ let!(:other_dashboard_annotation) { create(:metrics_dashboard_annotation, dashboard_path: 'other_dashboard.yml') }
+
+ it 'returns annotations only for the given dashboard' do
+ expect(described_class.for_dashboard('other_dashboard.yml')).to match_array [other_dashboard_annotation]
+ end
+ end
+ end
end
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 6a333898955..38fba5ea071 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -27,7 +27,7 @@ describe ProjectFeature do
end
describe '#feature_available?' do
- let(:features) { %w(issues wiki builds merge_requests snippets repository pages) }
+ let(:features) { %w(issues wiki builds merge_requests snippets repository pages metrics_dashboard) }
context 'when features are disabled' do
it "returns false" do
@@ -123,7 +123,7 @@ describe ProjectFeature do
end
context 'public features' do
- features = %w(issues wiki builds merge_requests snippets repository)
+ features = %w(issues wiki builds merge_requests snippets repository metrics_dashboard)
features.each do |feature|
it "does not allow public access level for #{feature}" do
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index 720dc4f435f..cb34d898a6e 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
describe ProjectImportState, type: :model do
- subject { create(:import_state) }
+ let_it_be(:correlation_id) { 'cid' }
+ let_it_be(:import_state, refind: true) { create(:import_state, correlation_id_value: correlation_id) }
+
+ subject { import_state }
describe 'associations' do
it { is_expected.to belong_to(:project) }
@@ -33,12 +36,24 @@ describe ProjectImportState, type: :model do
end
it 'records job and correlation IDs', :sidekiq_might_not_need_inline do
- allow(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return('abc')
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return(correlation_id)
import_state.schedule
expect(import_state.jid).to be_an_instance_of(String)
- expect(import_state.correlation_id).to eq('abc')
+ expect(import_state.correlation_id).to eq(correlation_id)
+ end
+ end
+
+ describe '#relation_hard_failures' do
+ let_it_be(:failures) { create_list(:import_failure, 2, :hard_failure, project: import_state.project, correlation_id_value: correlation_id) }
+
+ it 'returns hard relation failures related to this import' do
+ expect(subject.relation_hard_failures(limit: 100)).to match_array(failures)
+ end
+
+ it 'limits returned collection to given maximum' do
+ expect(subject.relation_hard_failures(limit: 1).size).to eq(1)
end
end
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 5565d30d8c1..a85dbe3a7df 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -418,4 +418,48 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
end
end
+
+ describe '#editable?' do
+ it 'is editable' do
+ expect(service.editable?).to be(true)
+ end
+
+ context 'when cluster exists with prometheus installed' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+
+ before do
+ service.update!(manual_configuration: false)
+
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'remains editable' do
+ expect(service.editable?).to be(true)
+ end
+ end
+ end
+
+ describe '#fields' do
+ let(:expected_fields) do
+ [
+ {
+ type: 'checkbox',
+ name: 'manual_configuration',
+ title: s_('PrometheusService|Active'),
+ required: true
+ },
+ {
+ type: 'text',
+ name: 'api_url',
+ title: 'API URL',
+ placeholder: s_('PrometheusService|Prometheus API Base URL, like http://prometheus.example.com/'),
+ required: true
+ }
+ ]
+ end
+
+ it 'returns fields' do
+ expect(service.fields).to eq(expected_fields)
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 3c8afee4466..4e75ef4fc87 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1740,32 +1740,12 @@ describe Project do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- context 'when feature is enabled' do
- before do
- stub_feature_flags(project_search_by_full_path: true)
- end
-
- it 'returns projects that match the group path' do
- expect(described_class.search(group.path, include_namespace: true)).to eq([project])
- end
-
- it 'returns projects that match the full path' do
- expect(described_class.search(project.full_path, include_namespace: true)).to eq([project])
- end
+ it 'returns projects that match the group path' do
+ expect(described_class.search(group.path, include_namespace: true)).to eq([project])
end
- context 'when feature is disabled' do
- before do
- stub_feature_flags(project_search_by_full_path: false)
- end
-
- it 'returns no results when searching by group path' do
- expect(described_class.search(group.path, include_namespace: true)).to be_empty
- end
-
- it 'returns no results when searching by full path' do
- expect(described_class.search(project.full_path, include_namespace: true)).to be_empty
- end
+ it 'returns projects that match the full path' do
+ expect(described_class.search(project.full_path, include_namespace: true)).to eq([project])
end
end
@@ -2665,18 +2645,6 @@ describe Project do
end
end
- describe '#daily_statistics_enabled?' do
- it { is_expected.to be_daily_statistics_enabled }
-
- context 'when :project_daily_statistics is disabled for the project' do
- before do
- stub_feature_flags(project_daily_statistics: { thing: subject, enabled: false })
- end
-
- it { is_expected.not_to be_daily_statistics_enabled }
- end
- end
-
describe '#change_head' do
let(:project) { create(:project, :repository) }
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
index 1b0181e3fd2..bf8672f95c9 100644
--- a/spec/models/resource_milestone_event_spec.rb
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -52,4 +52,30 @@ describe ResourceMilestoneEvent, type: :model do
end
end
end
+
+ shared_examples 'a milestone action queryable resource event' do |expected_results_for_actions|
+ [Issue, MergeRequest].each do |klass|
+ expected_results_for_actions.each do |action, expected_result|
+ it "is #{expected_result} for action #{action} on #{klass.name.underscore}" do
+ model = create(klass.name.underscore)
+ key = model.class.name.underscore
+ event = build(described_class.name.underscore.to_sym, key => model, action: action)
+
+ expect(event.send(query_method)).to eq(expected_result)
+ end
+ end
+ end
+ end
+
+ describe '#added?' do
+ it_behaves_like 'a milestone action queryable resource event', { add: true, remove: false } do
+ let(:query_method) { :add? }
+ end
+ end
+
+ describe '#removed?' do
+ it_behaves_like 'a milestone action queryable resource event', { add: false, remove: true } do
+ let(:query_method) { :remove? }
+ end
+ end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index 1d677e7ece5..3cd15e23ee2 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -5,24 +5,35 @@ require 'spec_helper'
describe Terraform::State do
subject { create(:terraform_state, :with_file) }
+ let(:terraform_state_file) { fixture_file('terraform/terraform.tfstate') }
+
it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:locked_by_user).class_name('User') }
+
it { is_expected.to validate_presence_of(:project_id) }
before do
stub_terraform_state_object_storage(Terraform::StateUploader)
end
- describe '#file_store' do
- context 'when no value is set' do
- it 'returns the default store of the uploader' do
- [ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
- expect(Terraform::StateUploader).to receive(:default_store).and_return(store)
- expect(described_class.new.file_store).to eq(store)
- end
+ describe '#file' do
+ context 'when a file exists' do
+ it 'does not use the default file' do
+ expect(subject.file.read).to eq(terraform_state_file)
end
end
+ context 'when no file exists' do
+ subject { create(:terraform_state) }
+
+ it 'creates a default file' do
+ expect(subject.file.read).to eq('{"version":1}')
+ end
+ end
+ end
+
+ describe '#file_store' do
context 'when a value is set' do
it 'returns the value' do
[ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 5a3e16baa87..8597397c3c6 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4357,18 +4357,19 @@ describe User, :do_not_mock_admin_mode do
describe 'internal methods' do
let_it_be(:user) { create(:user) }
- let!(:ghost) { described_class.ghost }
- let!(:alert_bot) { described_class.alert_bot }
- let!(:non_internal) { [user] }
- let!(:internal) { [ghost, alert_bot] }
+ let_it_be(:ghost) { described_class.ghost }
+ let_it_be(:alert_bot) { described_class.alert_bot }
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:non_internal) { [user, project_bot] }
+ let_it_be(:internal) { [ghost, alert_bot] }
it 'returns internal users' do
- expect(described_class.internal).to eq(internal)
+ expect(described_class.internal).to match_array(internal)
expect(internal.all?(&:internal?)).to eq(true)
end
it 'returns non internal users' do
- expect(described_class.non_internal).to eq(non_internal)
+ expect(described_class.non_internal).to match_array(non_internal)
expect(non_internal.all?(&:internal?)).to eq(false)
end
@@ -4420,9 +4421,12 @@ describe User, :do_not_mock_admin_mode do
it 'returns corresponding users' do
human = create(:user)
bot = create(:user, :bot)
+ project_bot = create(:user, :project_bot)
expect(described_class.humans).to match_array([human])
- expect(described_class.bots).to match_array([bot])
+ expect(described_class.bots).to match_array([bot, project_bot])
+ expect(described_class.bots_without_project_bot).to match_array([bot])
+ expect(described_class.with_project_bots).to match_array([human, project_bot])
end
end
@@ -4655,4 +4659,30 @@ describe User, :do_not_mock_admin_mode do
it { is_expected.to be :locked }
end
end
+
+ describe '#password_required?' do
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'does not require password to be present' do
+ it { expect(user).not_to validate_presence_of(:password) }
+
+ it { expect(user).not_to validate_presence_of(:password_confirmation) }
+ end
+
+ context 'when user is an internal user' do
+ before do
+ user.update(user_type: 'alert_bot')
+ end
+
+ it_behaves_like 'does not require password to be present'
+ end
+
+ context 'when user is a project bot user' do
+ before do
+ user.update(user_type: 'project_bot')
+ end
+
+ it_behaves_like 'does not require password to be present'
+ end
+ end
end
diff --git a/spec/models/user_type_enums_spec.rb b/spec/models/user_type_enums_spec.rb
new file mode 100644
index 00000000000..4f56e6ea96e
--- /dev/null
+++ b/spec/models/user_type_enums_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UserTypeEnums do
+ it '.types' do
+ expect(described_class.types.keys).to include('alert_bot', 'project_bot', 'human', 'ghost')
+ end
+
+ it '.bots' do
+ expect(described_class.bots.keys).to include('alert_bot', 'project_bot')
+ end
+end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 2d261241486..5e77b64a408 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe GlobalPolicy do
include TermsHelper
+ let_it_be(:project_bot) { create(:user, :project_bot) }
let(:current_user) { create(:user) }
let(:user) { create(:user) }
@@ -120,6 +121,12 @@ describe GlobalPolicy do
it { is_expected.to be_allowed(:access_api) }
end
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.to be_allowed(:access_api) }
+ end
+
context 'when terms are enforced' do
before do
enforce_terms
@@ -203,6 +210,12 @@ describe GlobalPolicy do
it { is_expected.not_to be_allowed(:receive_notifications) }
end
+
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.not_to be_allowed(:receive_notifications) }
+ end
end
describe 'git access' do
@@ -265,6 +278,12 @@ describe GlobalPolicy do
it { is_expected.to be_allowed(:access_git) }
end
end
+
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.to be_allowed(:access_git) }
+ end
end
describe 'read instance metadata' do
@@ -361,6 +380,12 @@ describe GlobalPolicy do
it { is_expected.not_to be_allowed(:use_slash_commands) }
end
+
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.to be_allowed(:use_slash_commands) }
+ end
end
describe 'create_snippet' do
@@ -380,4 +405,12 @@ describe GlobalPolicy do
it { is_expected.not_to be_allowed(:create_snippet) }
end
end
+
+ describe 'log in' do
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.not_to be_allowed(:log_in) }
+ end
+ end
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index 5948c3d719f..499c334d491 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -205,10 +205,11 @@ describe API::DeployTokens do
context 'deploy token creation' do
shared_examples 'creating a deploy token' do |entity, unauthenticated_response|
+ let(:expires_time) { 1.year.from_now }
let(:params) do
{
name: 'Foo',
- expires_at: 1.year.from_now,
+ expires_at: expires_time,
scopes: [
'read_repository'
],
@@ -240,6 +241,10 @@ describe API::DeployTokens do
expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/deploy_token')
+ expect(json_response['name']).to eq('Foo')
+ expect(json_response['scopes']).to eq(['read_repository'])
+ expect(json_response['username']).to eq('Bar')
+ expect(json_response['expires_at'].to_time.to_i).to eq(expires_time.to_i)
end
context 'with no optional params given' do
diff --git a/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
new file mode 100644
index 00000000000..f5a5f0a9ec2
--- /dev/null
+++ b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Getting Metrics Dashboard Annotations' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:path) { 'config/prometheus/common_metrics.yml' }
+ let_it_be(:from) { "2020-04-01T03:29:25Z" }
+ let_it_be(:to) { Time.zone.now.advance(minutes: 5) }
+ let_it_be(:annotation) { create(:metrics_dashboard_annotation, environment: environment, dashboard_path: path) }
+ let_it_be(:annotation_for_different_env) { create(:metrics_dashboard_annotation, dashboard_path: path) }
+ let_it_be(:annotation_for_different_dashboard) { create(:metrics_dashboard_annotation, environment: environment, dashboard_path: ".gitlab/dashboards/test.yml") }
+ let_it_be(:to_old_annotation) do
+ create(:metrics_dashboard_annotation, environment: environment, starting_at: Time.parse(from).advance(minutes: -5), dashboard_path: path)
+ end
+ let_it_be(:to_new_annotation) do
+ create(:metrics_dashboard_annotation, environment: environment, starting_at: to.advance(minutes: 5), dashboard_path: path)
+ end
+
+ let(:fields) do
+ <<~QUERY
+ #{all_graphql_fields_for('MetricsDashboardAnnotation'.classify)}
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath:"#{project.full_path}") {
+ environments(name: "#{environment.name}") {
+ nodes {
+ metricsDashboard(path: "#{path}"){
+ annotations(#{args}){
+ nodes {
+ #{fields}
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ context 'feature flag metrics_dashboard_annotations' do
+ let(:args) { "from: \"#{from}\", to: \"#{to}\"" }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'is off' do
+ before do
+ stub_feature_flags(metrics_dashboard_annotations: false)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'returns empty nodes array' do
+ annotations = graphql_data.dig('project', 'environments', 'nodes')[0].dig('metricsDashboard', 'annotations', 'nodes')
+
+ expect(annotations).to be_empty
+ end
+ end
+
+ context 'is on' do
+ before do
+ stub_feature_flags(metrics_dashboard_annotations: true)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns annotations' do
+ annotations = graphql_data.dig('project', 'environments', 'nodes')[0].dig('metricsDashboard', 'annotations', 'nodes')
+
+ expect(annotations).to match_array [{
+ "description" => annotation.description,
+ "id" => annotation.to_global_id.to_s,
+ "panelId" => annotation.panel_xid,
+ "startingAt" => annotation.starting_at.to_s,
+ "endingAt" => nil
+ }]
+ end
+
+ context 'arguments' do
+ context 'from is missing' do
+ let(:args) { "to: \"#{from}\"" }
+
+ it 'returns error' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_errors[0]).to include("message" => "Field 'annotations' is missing required arguments: from")
+ end
+ end
+
+ context 'to is missing' do
+ let(:args) { "from: \"#{from}\"" }
+
+ it_behaves_like 'a working graphql query'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
index feca89558e3..014da5d1e1a 100644
--- a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
@@ -99,12 +99,6 @@ describe 'Starting a Jira Import' do
it_behaves_like 'a mutation that returns errors in the response', errors: ['Jira integration not configured.']
end
- context 'when issues feature are disabled' do
- let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
-
- it_behaves_like 'a mutation that returns errors in the response', errors: ['Cannot import because issues are not available in this project.']
- end
-
context 'when project has Jira service' do
let!(:service) { create(:jira_service, project: project) }
@@ -112,6 +106,12 @@ describe 'Starting a Jira Import' do
project.reload
end
+ context 'when issues feature is disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: ['Cannot import because issues are not available in this project.']
+ end
+
context 'when jira_project_key not provided' do
let(:jira_project_key) { '' }
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index a1b3111ff71..8d8c31c335d 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -130,4 +130,15 @@ describe 'getting merge request information nested in a project' do
expect(merge_requests_graphql_data.size).to eq 2
end
end
+
+ context 'when merge request is cannot_be_merged_rechecking' do
+ before do
+ merge_request.update!(merge_status: 'cannot_be_merged_rechecking')
+ end
+
+ it 'returns checking' do
+ post_graphql(query, current_user: current_user)
+ expect(merge_request_graphql_data['mergeStatus']).to eq('checking')
+ end
+ end
end
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 9b787e76740..09342b06744 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -3,8 +3,6 @@
require "spec_helper"
describe API::Markdown do
- RSpec::Matchers.define_negated_matcher :exclude, :include
-
describe "POST /markdown" do
let(:user) {} # No-op. It gets overwritten in the contexts below.
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index a8543c8e282..af2ce7f7aef 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -892,6 +892,7 @@ describe API::MergeRequests do
expect(json_response['merge_error']).to eq(merge_request.merge_error)
expect(json_response['user']['can_merge']).to be_truthy
expect(json_response).not_to include('rebase_in_progress')
+ expect(json_response['first_contribution']).to be_falsy
expect(json_response['has_conflicts']).to be_falsy
expect(json_response['blocking_discussions_resolved']).to be_truthy
expect(json_response['references']['short']).to eq("!#{merge_request.iid}")
@@ -915,6 +916,21 @@ describe API::MergeRequests do
expect(json_response).to include('rebase_in_progress')
end
+ context 'when author is not a member without any merged merge requests' do
+ let(:non_member) { create(:user) }
+
+ before do
+ merge_request.update(author: non_member)
+ end
+
+ it 'exposes first_contribution as true' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['first_contribution']).to be_truthy
+ end
+ end
+
context 'merge_request_metrics' do
let(:pipeline) { create(:ci_empty_pipeline) }
@@ -1060,6 +1076,14 @@ describe API::MergeRequests do
expect(json_response['user']['can_merge']).to be_falsy
end
+ it 'returns `checking` as its merge_status instead of `cannot_be_merged_rechecking`' do
+ merge_request.update!(merge_status: 'cannot_be_merged_rechecking')
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
+
+ expect(json_response['merge_status']).to eq 'checking'
+ end
+
context 'when merge request is unchecked' do
before do
merge_request.mark_as_unchecked!
diff --git a/spec/requests/api/project_statistics_spec.rb b/spec/requests/api/project_statistics_spec.rb
index 5d0b506cc92..1f48c081043 100644
--- a/spec/requests/api/project_statistics_spec.rb
+++ b/spec/requests/api/project_statistics_spec.rb
@@ -50,13 +50,5 @@ describe API::ProjectStatistics do
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
-
- it 'responds with 404 when daily_statistics_enabled? is false' do
- stub_feature_flags(project_daily_statistics: { thing: public_project, enabled: false })
-
- get api("/projects/#{public_project.id}/statistics", maintainer)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 190afb9cda5..853155cea7a 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -2414,7 +2414,8 @@ describe API::Projects do
project_param = {
container_expiration_policy_attributes: {
cadence: '1month',
- keep_n: 1
+ keep_n: 1,
+ name_regex_keep: 'foo.*'
}
}
@@ -2424,6 +2425,7 @@ describe API::Projects do
expect(json_response['container_expiration_policy']['cadence']).to eq('1month')
expect(json_response['container_expiration_policy']['keep_n']).to eq(1)
+ expect(json_response['container_expiration_policy']['name_regex_keep']).to eq('foo.*')
end
end
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index b0a963db684..88c277f4e08 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -3,95 +3,231 @@
require 'spec_helper'
describe API::Terraform::State do
- def auth_header_for(user)
- auth_header = ActionController::HttpAuthentication::Basic.encode_credentials(
- user.username,
- create(:personal_access_token, user: user).token
- )
- { 'HTTP_AUTHORIZATION' => auth_header }
- end
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user, developer_projects: [project]) }
+ let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }
+
+ let!(:state) { create(:terraform_state, :with_file, project: project) }
- let!(:project) { create(:project) }
- let(:developer) { create(:user) }
- let(:maintainer) { create(:user) }
- let(:state_name) { 'state' }
+ let(:current_user) { maintainer }
+ let(:auth_header) { basic_auth_header(current_user) }
+ let(:project_id) { project.id }
+ let(:state_name) { state.name }
+ let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name}" }
before do
- project.add_maintainer(maintainer)
+ stub_terraform_state_object_storage(Terraform::StateUploader)
end
describe 'GET /projects/:id/terraform/state/:name' do
- it 'returns 401 if user is not authenticated' do
- headers = { 'HTTP_AUTHORIZATION' => 'failing_token' }
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: headers
+ subject(:request) { get api(state_path), headers: auth_header }
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ context 'without authentication' do
+ let(:auth_header) { basic_auth_header('failing_token') }
- it 'returns terraform state belonging to a project of given state name' do
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(maintainer)
+ it 'returns 401 if user is not authenticated' do
+ request
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(response.body).to eq('not implemented')
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
- it 'returns not found if the project does not exists' do
- get api("/projects/0000/terraform/state/#{state_name}"), headers: auth_header_for(maintainer)
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'returns terraform state belonging to a project of given state name' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(state.file.read)
+ end
+
+ context 'for a project that does not exist' do
+ let(:project_id) { '0000' }
+
+ it 'returns not found' do
+ request
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
- it 'returns forbidden if the user cannot access the state' do
- project.add_developer(developer)
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(developer)
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden if the user cannot access the state' do
+ request
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
describe 'POST /projects/:id/terraform/state/:name' do
+ let(:params) { { 'instance': 'example-instance' } }
+
+ subject(:request) { post api(state_path), headers: auth_header, as: :json, params: params }
+
context 'when terraform state with a given name is already present' do
- it 'updates the state' do
- post api("/projects/#{project.id}/terraform/state/#{state_name}"),
- params: '{ "instance": "example-instance" }',
- headers: { 'Content-Type' => 'text/plain' }.merge(auth_header_for(maintainer))
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(response.body).to eq('not implemented')
+ it 'updates the state' do
+ expect { request }.to change { Terraform::State.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
- it 'returns forbidden if the user cannot access the state' do
- project.add_developer(developer)
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(developer)
+ context 'without body' do
+ let(:params) { nil }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'returns no content if no body is provided' do
+ request
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
context 'when there is no terraform state of a given name' do
- it 'creates a new state' do
- post api("/projects/#{project.id}/terraform/state/example2"),
- headers: auth_header_for(maintainer),
- params: '{ "database": "example-database" }'
+ let(:state_name) { 'example2' }
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'creates a new state' do
+ expect { request }.to change { Terraform::State.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'without body' do
+ let(:params) { nil }
+
+ it 'returns no content if no body is provided' do
+ request
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(response.body).to eq('not implemented')
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
end
describe 'DELETE /projects/:id/terraform/state/:name' do
- it 'deletes the state' do
- delete api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(maintainer)
+ subject(:request) { delete api(state_path), headers: auth_header }
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'deletes the state' do
+ expect { request }.to change { Terraform::State.count }.by(-1)
- expect(response).to have_gitlab_http_status(:not_implemented)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden' do
+ expect { request }.to change { Terraform::State.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ describe 'PUT /projects/:id/terraform/state/:name/lock' do
+ let(:params) do
+ {
+ ID: '123-456',
+ Version: '0.1',
+ Operation: 'OperationTypePlan',
+ Info: '',
+ Who: current_user.username,
+ Created: Time.now.utc.iso8601(6),
+ Path: ''
+ }
+ end
+
+ subject(:request) { post api("#{state_path}/lock"), headers: auth_header, params: params }
+
+ it 'locks the terraform state' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
end
+ end
+
+ describe 'DELETE /projects/:id/terraform/state/:name/lock' do
+ before do
+ state.lock_xid = '123-456'
+ state.save!
+ end
+
+ subject(:request) { delete api("#{state_path}/lock"), headers: auth_header, params: params }
- it 'returns forbidden if the user cannot access the state' do
- project.add_developer(developer)
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(developer)
+ context 'with the correct lock id' do
+ let(:params) { { ID: '123-456' } }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'removes the terraform state lock' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with no lock id (force-unlock)' do
+ let(:params) { {} }
+
+ it 'removes the terraform state lock' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with an incorrect lock id' do
+ let(:params) { { ID: '456-789' } }
+
+ it 'returns an error' do
+ request
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
+
+ context 'with a longer than 255 character lock id' do
+ let(:params) { { ID: '0' * 256 } }
+
+ it 'returns an error' do
+ request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
end
diff --git a/spec/routing/openid_connect_spec.rb b/spec/routing/openid_connect_spec.rb
index 70470032930..fc170f8986c 100644
--- a/spec/routing/openid_connect_spec.rb
+++ b/spec/routing/openid_connect_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
# oauth_discovery_keys GET /oauth/discovery/keys(.:format) doorkeeper/openid_connect/discovery#keys
+# jwks GET /-/jwks(.:format) doorkeeper/openid_connect/discovery#keys
# oauth_discovery_provider GET /.well-known/openid-configuration(.:format) doorkeeper/openid_connect/discovery#provider
# oauth_discovery_webfinger GET /.well-known/webfinger(.:format) doorkeeper/openid_connect/discovery#webfinger
describe Doorkeeper::OpenidConnect::DiscoveryController, 'routing' do
@@ -17,6 +18,10 @@ describe Doorkeeper::OpenidConnect::DiscoveryController, 'routing' do
it "to #keys" do
expect(get('/oauth/discovery/keys')).to route_to('doorkeeper/openid_connect/discovery#keys')
end
+
+ it "/-/jwks" do
+ expect(get('/-/jwks')).to route_to('doorkeeper/openid_connect/discovery#keys')
+ end
end
# oauth_userinfo GET /oauth/userinfo(.:format) doorkeeper/openid_connect/userinfo#show
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 0272d269aa4..c1b1150c28b 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -838,6 +838,13 @@ describe 'project routing' do
end
end
+ describe Projects::StaticSiteEditorController, 'routing' do
+ it 'routes to static_site_editor#show', :aggregate_failures do
+ expect(get('/gitlab/gitlabhq/-/sse/master/CONTRIBUTING.md')).to route_to('projects/static_site_editor#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/CONTRIBUTING.md')
+ expect(get('/gitlab/gitlabhq/-/sse/master/README')).to route_to('projects/static_site_editor#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/README')
+ end
+ end
+
describe Projects::EnvironmentsController, 'routing' do
describe 'legacy routing' do
it_behaves_like 'redirecting a legacy project path', "/gitlab/gitlabhq/environments", "/gitlab/gitlabhq/-/environments"
diff --git a/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb b/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
new file mode 100644
index 00000000000..938916d8d75
--- /dev/null
+++ b/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require_relative '../../../support/helpers/expect_offense'
+require_relative '../../../../rubocop/cop/rspec/modify_sidekiq_middleware'
+
+describe RuboCop::Cop::RSpec::ModifySidekiqMiddleware do
+ include CopHelper
+ include ExpectOffense
+
+ subject(:cop) { described_class.new }
+
+ let(:source) do
+ <<~SRC
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.add(MyCustomMiddleware)
+ end
+ SRC
+ end
+
+ let(:corrected) do
+ <<~SRC
+ with_sidekiq_server_middleware do |chain|
+ chain.add(MyCustomMiddleware)
+ end
+ SRC
+ end
+
+ it 'registers an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'can autocorrect the source' do
+ expect(autocorrect_source(source)).to eq(corrected)
+ end
+end
diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb
new file mode 100644
index 00000000000..b85f9da9b4e
--- /dev/null
+++ b/spec/rubocop/cop/static_translation_definition_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../rubocop/cop/static_translation_definition'
+
+describe RuboCop::Cop::StaticTranslationDefinition do
+ include CopHelper
+
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:cop) { described_class.new }
+
+ shared_examples 'offense' do |code, highlight, line|
+ it 'registers an offense' do
+ inspect_source(code)
+
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([line])
+ expect(cop.highlights).to eq([highlight])
+ end
+ end
+
+ shared_examples 'no offense' do |code|
+ it 'does not register an offense' do
+ inspect_source(code)
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ describe 'offenses' do
+ where(:code, :highlight, :line) do
+ [
+ ['A = _("a")', '_("a")', 1],
+ ['B = s_("b")', 's_("b")', 1],
+ ['C = n_("c")', 'n_("c")', 1],
+ [
+ <<~CODE,
+ module MyModule
+ A = {
+ b: {
+ c: _("a")
+ }
+ }
+ end
+ CODE
+ '_("a")',
+ 4
+ ],
+ [
+ <<~CODE,
+ class MyClass
+ B = [
+ [
+ s_("a")
+ ]
+ ]
+ end
+ CODE
+ 's_("a")',
+ 4
+ ]
+ ]
+ end
+
+ with_them do
+ include_examples 'offense', params[:code], params[:highlight], params[:line]
+ end
+ end
+
+ describe 'ignore' do
+ where(:code) do
+ [
+ 'CONSTANT_1 = __("a")',
+ 'CONSTANT_2 = s__("a")',
+ 'CONSTANT_3 = n__("a")',
+ <<~CODE,
+ def method
+ s_('a')
+ end
+ CODE
+ <<~CODE,
+ class MyClass
+ VALID = -> {
+ s_('hi')
+ }
+ end
+ CODE
+ <<~CODE
+ class MyClass
+ def hello
+ {
+ a: _('hi')
+ }
+ end
+ end
+ CODE
+ ]
+ end
+
+ with_them do
+ include_examples 'no offense', params[:code]
+ end
+ end
+end
diff --git a/spec/serializers/analytics_summary_serializer_spec.rb b/spec/serializers/analytics_summary_serializer_spec.rb
index 7950f89bcc7..ed126720a55 100644
--- a/spec/serializers/analytics_summary_serializer_spec.rb
+++ b/spec/serializers/analytics_summary_serializer_spec.rb
@@ -34,7 +34,10 @@ describe AnalyticsSummarySerializer do
end
context 'when representing with unit' do
- let(:resource) { { title: 'frequency', value: 1.12, unit: 'per day' } }
+ let(:resource) do
+ Gitlab::CycleAnalytics::Summary::DeploymentFrequency
+ .new(deployments: 10, from: 1.day.ago)
+ end
subject { described_class.new.represent(resource, with_unit: true) }
diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb
index 4adf1dc5994..b441fd08b98 100644
--- a/spec/serializers/discussion_entity_spec.rb
+++ b/spec/serializers/discussion_entity_spec.rb
@@ -73,9 +73,19 @@ describe DiscussionEntity do
:diff_file,
:truncated_diff_lines,
:position,
+ :positions,
+ :line_codes,
:line_code,
:active
)
end
+
+ context 'diff_head_compare feature is disabled' do
+ it 'does not expose positions and line_codes attributes' do
+ stub_feature_flags(merge_ref_head_comments: false)
+
+ expect(subject.keys).not_to include(:positions, :line_codes)
+ end
+ end
end
end
diff --git a/spec/serializers/merge_request_basic_entity_spec.rb b/spec/serializers/merge_request_basic_entity_spec.rb
new file mode 100644
index 00000000000..53ba66a79ac
--- /dev/null
+++ b/spec/serializers/merge_request_basic_entity_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestBasicEntity do
+ let(:resource) { build(:merge_request) }
+
+ subject do
+ described_class.new(resource).as_json
+ end
+
+ it 'has public_merge_status as merge_status' do
+ expect(resource).to receive(:public_merge_status).and_return('checking')
+
+ expect(subject[:merge_status]).to eq 'checking'
+ end
+end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 685abbf7e6c..9f96e5711a4 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -19,6 +19,12 @@ describe MergeRequestPollCachedWidgetEntity do
is_expected.to include(:target_branch_sha)
end
+ it 'has public_merge_status as merge_status' do
+ expect(resource).to receive(:public_merge_status).and_return('checking')
+
+ expect(subject[:merge_status]).to eq 'checking'
+ end
+
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 29d35fdc811..fe0b717ede0 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -94,6 +94,10 @@ describe MergeRequestPollWidgetEntity do
end
describe 'auto merge' do
+ before do
+ project.add_maintainer(user)
+ end
+
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
diff --git a/spec/serializers/merge_request_serializer_spec.rb b/spec/serializers/merge_request_serializer_spec.rb
index 871a47b0a02..90b3efae412 100644
--- a/spec/serializers/merge_request_serializer_spec.rb
+++ b/spec/serializers/merge_request_serializer_spec.rb
@@ -69,6 +69,22 @@ describe MergeRequestSerializer do
end
end
+ context 'poll cached widget merge request serialization' do
+ let(:serializer) { 'poll_cached_widget' }
+
+ it 'matches basic merge request json schema' do
+ expect(json_entity).to match_schema('entities/merge_request_poll_cached_widget')
+ end
+ end
+
+ context 'poll widget merge request serialization' do
+ let(:serializer) { 'poll_widget' }
+
+ it 'matches basic merge request json schema' do
+ expect(json_entity).to match_schema('entities/merge_request_poll_widget')
+ end
+ end
+
context 'no serializer' do
let(:serializer) { nil }
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index e03d87e9d49..b6e8d3c636a 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe AutoMerge::MergeWhenPipelineSucceedsService do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
let(:mr_merge_if_green_enabled) do
create(:merge_request, merge_when_pipeline_succeeds: true, merge_user: user,
@@ -20,6 +20,10 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
described_class.new(project, user, commit_message: 'Awesome message')
end
+ before_all do
+ project.add_maintainer(user)
+ end
+
describe "#available_for?" do
subject { service.available_for?(mr_merge_if_green_enabled) }
@@ -34,11 +38,25 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
it { is_expected.to be_truthy }
+ it 'memoizes the result' do
+ expect(mr_merge_if_green_enabled).to receive(:can_be_merged_by?).once.and_call_original
+
+ 2.times { is_expected.to be_truthy }
+ end
+
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it { is_expected.to be_falsy }
end
+
+ context 'when the user does not have permission to merge' do
+ before do
+ allow(mr_merge_if_green_enabled).to receive(:can_be_merged_by?) { false }
+ end
+
+ it { is_expected.to be_falsy }
+ end
end
describe "#execute" do
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 221cf695331..bab69fb4aa3 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -3,22 +3,36 @@
require 'spec_helper'
describe AutoMergeService do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user) }
- describe '.all_strategies' do
- subject { described_class.all_strategies }
+ before_all do
+ project.add_maintainer(user)
+ end
- it 'includes merge when pipeline succeeds' do
- is_expected.to include(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ describe '.all_strategies_ordered_by_preference' do
+ subject { described_class.all_strategies_ordered_by_preference }
+
+ it 'returns all strategies in preference order' do
+ if Gitlab.ee?
+ is_expected.to eq(
+ [AutoMergeService::STRATEGY_MERGE_TRAIN,
+ AutoMergeService::STRATEGY_ADD_TO_MERGE_TRAIN_WHEN_PIPELINE_SUCCEEDS,
+ AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS])
+ else
+ is_expected.to eq([AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS])
+ end
end
end
describe '#available_strategies' do
subject { service.available_strategies(merge_request) }
- let(:merge_request) { create(:merge_request) }
+ let(:merge_request) do
+ create(:merge_request, source_project: project)
+ end
+
let(:pipeline_status) { :running }
before do
@@ -42,6 +56,36 @@ describe AutoMergeService do
end
end
+ describe '#preferred_strategy' do
+ subject { service.preferred_strategy(merge_request) }
+
+ let(:merge_request) do
+ create(:merge_request, source_project: project)
+ end
+
+ let(:pipeline_status) { :running }
+
+ before do
+ create(:ci_pipeline, pipeline_status, ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha,
+ project: merge_request.source_project)
+
+ merge_request.update_head_pipeline
+ end
+
+ it 'returns preferred strategy' do
+ is_expected.to eq('merge_when_pipeline_succeeds')
+ end
+
+ context 'when the head pipeline succeeded' do
+ let(:pipeline_status) { :success }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '.get_service_class' do
subject { described_class.get_service_class(strategy) }
@@ -63,7 +107,10 @@ describe AutoMergeService do
describe '#execute' do
subject { service.execute(merge_request, strategy) }
- let(:merge_request) { create(:merge_request) }
+ let(:merge_request) do
+ create(:merge_request, source_project: project)
+ end
+
let(:pipeline_status) { :running }
let(:strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
@@ -90,6 +137,14 @@ describe AutoMergeService do
is_expected.to eq(:failed)
end
end
+
+ context 'when strategy is not specified' do
+ let(:strategy) { }
+
+ it 'chooses the most preferred strategy' do
+ is_expected.to eq(:merge_when_pipeline_succeeds)
+ end
+ end
end
describe '#update' do
diff --git a/spec/services/ci/create_cross_project_pipeline_service_spec.rb b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
index a411244e57f..5c59aaa4ce9 100644
--- a/spec/services/ci/create_cross_project_pipeline_service_spec.rb
+++ b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
@@ -475,5 +475,45 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
expect(bridge.failure_reason).to eq 'insufficient_bridge_permissions'
end
end
+
+ context 'when there is no such branch in downstream project' do
+ let(:trigger) do
+ {
+ trigger: {
+ project: downstream_project.full_path,
+ branch: 'invalid_branch'
+ }
+ }
+ end
+
+ it 'does not create a pipeline and drops the bridge' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
+ end
+ end
+
+ context 'when the downstream pipeline has a branch rule that is not satisfied' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ let(:config) do
+ <<-EOY
+ hello:
+ script: echo world
+ only:
+ - invalid_branch
+ EOY
+ end
+
+ it 'does not create a pipeline and drops the bridge' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
+ end
+ end
end
end
diff --git a/spec/services/ci/update_runner_service_spec.rb b/spec/services/ci/update_runner_service_spec.rb
index 2b07dad7248..abe575eebc8 100644
--- a/spec/services/ci/update_runner_service_spec.rb
+++ b/spec/services/ci/update_runner_service_spec.rb
@@ -23,6 +23,19 @@ describe Ci::UpdateRunnerService do
end
end
+ context 'with cost factor params' do
+ let(:params) { { public_projects_minutes_cost_factor: 1.1, private_projects_minutes_cost_factor: 2.2 } }
+
+ it 'updates the runner cost factors' do
+ expect(update).to be_truthy
+
+ runner.reload
+
+ expect(runner.public_projects_minutes_cost_factor).to eq(1.1)
+ expect(runner.private_projects_minutes_cost_factor).to eq(2.2)
+ end
+ end
+
context 'when params are not valid' do
let(:params) { { run_untagged: false } }
diff --git a/spec/services/emails/destroy_service_spec.rb b/spec/services/emails/destroy_service_spec.rb
index 5abe8da2529..9e14a13aa4f 100644
--- a/spec/services/emails/destroy_service_spec.rb
+++ b/spec/services/emails/destroy_service_spec.rb
@@ -10,7 +10,10 @@ describe Emails::DestroyService do
describe '#execute' do
it 'removes an email' do
- expect { service.execute(email) }.to change { user.emails.count }.by(-1)
+ response = service.execute(email)
+
+ expect(user.emails).not_to include(email)
+ expect(response).to be true
end
end
end
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index fc5e379f51d..924e913a9ec 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -160,6 +160,49 @@ describe Git::ProcessRefChangesService do
let(:ref_prefix) { 'refs/heads' }
it_behaves_like 'service for processing ref changes', Git::BranchPushService
+
+ context 'when there are merge requests associated with branches' do
+ let(:tag_changes) do
+ [
+ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
+ ]
+ end
+ let(:branch_changes) do
+ [
+ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
+ { index: 1, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
+ { index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" }
+ ]
+ end
+ let(:git_changes) { double(branch_changes: branch_changes, tag_changes: tag_changes) }
+
+ it 'schedules job for existing merge requests' do
+ expect_next_instance_of(MergeRequests::PushedBranchesService) do |service|
+ expect(service).to receive(:execute).and_return(%w(create1 create2))
+ end
+
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async)
+ .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789012', "#{ref_prefix}/create1").ordered
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async)
+ .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789013', "#{ref_prefix}/create2").ordered
+ expect(UpdateMergeRequestsWorker).not_to receive(:perform_async)
+ .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789014', "#{ref_prefix}/create3").ordered
+
+ subject.execute
+ end
+
+ context 'refresh_only_existing_merge_requests_on_push disabled' do
+ before do
+ stub_feature_flags(refresh_only_existing_merge_requests_on_push: false)
+ end
+
+ it 'refreshes all merge requests' do
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async).exactly(3).times
+
+ subject.execute
+ end
+ end
+ end
end
context 'tag changes' do
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
new file mode 100644
index 00000000000..419e29d92a8
--- /dev/null
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Issues::ExportCsvService do
+ let_it_be(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :public, group: group) }
+ let!(:issue) { create(:issue, project: project, author: user) }
+ let!(:bad_issue) { create(:issue, project: project, author: user) }
+ let(:subject) { described_class.new(Issue.all, project) }
+
+ it 'renders csv to string' do
+ expect(subject.csv_data).to be_a String
+ end
+
+ describe '#email' do
+ it 'emails csv' do
+ expect { subject.email(user) }.to change(ActionMailer::Base.deliveries, :count)
+ end
+
+ it 'renders with a target filesize' do
+ expect(subject.csv_builder).to receive(:render).with(described_class::TARGET_FILESIZE)
+
+ subject.email(user)
+ end
+ end
+
+ def csv
+ CSV.parse(subject.csv_data, headers: true)
+ end
+
+ context 'includes' do
+ let(:milestone) { create(:milestone, title: 'v1.0', project: project) }
+ let(:idea_label) { create(:label, project: project, title: 'Idea') }
+ let(:feature_label) { create(:label, project: project, title: 'Feature') }
+
+ before do
+ # Creating a timelog touches the issue's updated_at timestamp,
+ # so create the timelogs first.
+ issue.timelogs.create(time_spent: 360, user: user)
+ issue.timelogs.create(time_spent: 200, user: user)
+ issue.update!(milestone: milestone,
+ assignees: [user],
+ description: 'Issue with details',
+ state: :opened,
+ due_date: DateTime.new(2014, 3, 2),
+ created_at: DateTime.new(2015, 4, 3, 2, 1, 0),
+ updated_at: DateTime.new(2016, 5, 4, 3, 2, 1),
+ closed_at: DateTime.new(2017, 6, 5, 4, 3, 2),
+ weight: 4,
+ discussion_locked: true,
+ labels: [feature_label, idea_label],
+ time_estimate: 72000)
+ end
+
+ it 'includes the columns required for import' do
+ expect(csv.headers).to include('Title', 'Description')
+ end
+
+ specify 'iid' do
+ expect(csv[0]['Issue ID']).to eq issue.iid.to_s
+ end
+
+ specify 'url' do
+ expect(csv[0]['URL']).to match(/http.*#{project.full_path}.*#{issue.iid}/)
+ end
+
+ specify 'title' do
+ expect(csv[0]['Title']).to eq issue.title
+ end
+
+ specify 'state' do
+ expect(csv[0]['State']).to eq 'Open'
+ end
+
+ specify 'description' do
+ expect(csv[0]['Description']).to eq issue.description
+ expect(csv[1]['Description']).to eq nil
+ end
+
+ specify 'author name' do
+ expect(csv[0]['Author']).to eq issue.author_name
+ end
+
+ specify 'author username' do
+ expect(csv[0]['Author Username']).to eq issue.author.username
+ end
+
+ specify 'assignee name' do
+ expect(csv[0]['Assignee']).to eq user.name
+ expect(csv[1]['Assignee']).to eq ''
+ end
+
+ specify 'assignee username' do
+ expect(csv[0]['Assignee Username']).to eq user.username
+ expect(csv[1]['Assignee Username']).to eq ''
+ end
+
+ specify 'confidential' do
+ expect(csv[0]['Confidential']).to eq 'No'
+ end
+
+ specify 'milestone' do
+ expect(csv[0]['Milestone']).to eq issue.milestone.title
+ expect(csv[1]['Milestone']).to eq nil
+ end
+
+ specify 'labels' do
+ expect(csv[0]['Labels']).to eq 'Feature,Idea'
+ expect(csv[1]['Labels']).to eq nil
+ end
+
+ specify 'due_date' do
+ expect(csv[0]['Due Date']).to eq '2014-03-02'
+ expect(csv[1]['Due Date']).to eq nil
+ end
+
+ specify 'created_at' do
+ expect(csv[0]['Created At (UTC)']).to eq '2015-04-03 02:01:00'
+ end
+
+ specify 'updated_at' do
+ expect(csv[0]['Updated At (UTC)']).to eq '2016-05-04 03:02:01'
+ end
+
+ specify 'closed_at' do
+ expect(csv[0]['Closed At (UTC)']).to eq '2017-06-05 04:03:02'
+ expect(csv[1]['Closed At (UTC)']).to eq nil
+ end
+
+ specify 'discussion_locked' do
+ expect(csv[0]['Locked']).to eq 'Yes'
+ end
+
+ specify 'weight' do
+ expect(csv[0]['Weight']).to eq '4'
+ end
+
+ specify 'time estimate' do
+ expect(csv[0]['Time Estimate']).to eq '72000'
+ expect(csv[1]['Time Estimate']).to eq '0'
+ end
+
+ specify 'time spent' do
+ expect(csv[0]['Time Spent']).to eq '560'
+ expect(csv[1]['Time Spent']).to eq '0'
+ end
+
+ context 'with issues filtered by labels and project' do
+ let(:subject) do
+ described_class.new(
+ IssuesFinder.new(user,
+ project_id: project.id,
+ label_name: %w(Idea Feature)).execute, project)
+ end
+
+ it 'returns only filtered objects' do
+ expect(csv.count).to eq(1)
+ expect(csv[0]['Issue ID']).to eq issue.iid.to_s
+ end
+ end
+ end
+
+ context 'with minimal details' do
+ it 'renders labels as nil' do
+ expect(csv[0]['Labels']).to eq nil
+ end
+ end
+end
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index ae0c4f63fee..90f38945a9f 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -5,8 +5,9 @@ require 'spec_helper'
describe JiraImport::StartImportService do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
+ let(:key) { 'KEY' }
- subject { described_class.new(user, project, '').execute }
+ subject { described_class.new(user, project, key).execute }
context 'when feature flag disabled' do
before do
@@ -23,6 +24,8 @@ describe JiraImport::StartImportService do
context 'when user does not have permissions to run the import' do
before do
+ create(:jira_service, project: project, active: true)
+
project.add_developer(user)
end
@@ -38,19 +41,21 @@ describe JiraImport::StartImportService do
it_behaves_like 'responds with error', 'Jira integration not configured.'
end
- context 'when issues feature are disabled' do
- let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
-
- it_behaves_like 'responds with error', 'Cannot import because issues are not available in this project.'
- end
-
context 'when Jira service exists' do
let!(:jira_service) { create(:jira_service, project: project, active: true) }
context 'when Jira project key is not provided' do
+ let(:key) { '' }
+
it_behaves_like 'responds with error', 'Unable to find Jira project to import data from.'
end
+ context 'when the issues feature is disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'responds with error', 'Cannot import because issues are not available in this project.'
+ end
+
context 'when correct data provided' do
let(:fake_key) { 'some-key' }
@@ -62,15 +67,17 @@ describe JiraImport::StartImportService do
it_behaves_like 'responds with error', 'Jira import is already running.'
end
- it 'returns success response' do
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_success
- end
+ context 'when everything is ok' do
+ it 'returns success response' do
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_success
+ end
- it 'schedules jira import' do
- subject
+ it 'schedules jira import' do
+ subject
- expect(project.latest_jira_import).to be_scheduled
+ expect(project.latest_jira_import).to be_scheduled
+ end
end
it 'creates jira import data' do
diff --git a/spec/services/merge_requests/merge_orchestration_service_spec.rb b/spec/services/merge_requests/merge_orchestration_service_spec.rb
new file mode 100644
index 00000000000..c50f20d7703
--- /dev/null
+++ b/spec/services/merge_requests/merge_orchestration_service_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::MergeOrchestrationService do
+ let_it_be(:maintainer) { create(:user) }
+ let(:merge_params) { { sha: merge_request.diff_head_sha } }
+ let(:user) { maintainer }
+ let(:service) { described_class.new(project, user, merge_params) }
+
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, source_branch: 'feature',
+ target_project: project, target_branch: 'master')
+ end
+
+ shared_context 'fresh repository' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ before_all do
+ project.add_maintainer(maintainer)
+ end
+ end
+
+ describe '#execute' do
+ subject { service.execute(merge_request) }
+
+ include_context 'fresh repository'
+
+ context 'when merge request is mergeable' do
+ context 'when merge request can be merged automatically' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline, project: project, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it 'schedules auto merge' do
+ expect_next_instance_of(AutoMergeService, project, user, merge_params) do |service|
+ expect(service).to receive(:execute).with(merge_request).and_call_original
+ end
+
+ subject
+
+ expect(merge_request).to be_auto_merge_enabled
+ expect(merge_request.auto_merge_strategy).to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ expect(merge_request).not_to be_merged
+ end
+ end
+
+ context 'when merge request cannot be merged automatically' do
+ it 'merges immediately', :sidekiq_inline do
+ expect(merge_request)
+ .to receive(:merge_async).with(user.id, merge_params)
+ .and_call_original
+
+ subject
+
+ merge_request.reset
+ expect(merge_request).to be_merged
+ expect(merge_request).not_to be_auto_merge_enabled
+ end
+ end
+ end
+
+ context 'when merge request is not mergeable' do
+ before do
+ allow(merge_request).to receive(:mergeable_state?) { false }
+ end
+
+ it 'does nothing' do
+ subject
+
+ expect(merge_request).not_to be_auto_merge_enabled
+ expect(merge_request).not_to be_merged
+ end
+ end
+ end
+
+ describe '#can_merge?' do
+ subject { service.can_merge?(merge_request) }
+
+ include_context 'fresh repository'
+
+ context 'when merge request is mergeable' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when merge request is not mergeable' do
+ before do
+ allow(merge_request).to receive(:mergeable_state?) { false }
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#preferred_auto_merge_strategy' do
+ subject { service.preferred_auto_merge_strategy(merge_request) }
+
+ include_context 'fresh repository'
+
+ context 'when merge request can be merged automatically' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline, project: project, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it 'fetches the preferred auto merge strategy' do
+ is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ end
+ end
+
+ context 'when merge request cannot be merged automatically' do
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb
new file mode 100644
index 00000000000..7b5d505f4d9
--- /dev/null
+++ b/spec/services/merge_requests/pushed_branches_service_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::PushedBranchesService do
+ let(:project) { create(:project) }
+ let!(:service) { described_class.new(project, nil, changes: pushed_branches) }
+
+ context 'when branches pushed' do
+ let(:pushed_branches) do
+ %w(branch1 branch2 extra1 extra2 extra3).map do |branch|
+ { ref: "refs/heads/#{branch}" }
+ end
+ end
+
+ it 'returns only branches which have a merge request' do
+ create(:merge_request, source_branch: 'branch1', source_project: project)
+ create(:merge_request, source_branch: 'branch2', source_project: project)
+ create(:merge_request, target_branch: 'branch2', source_project: project)
+ create(:merge_request, :closed, target_branch: 'extra1', source_project: project)
+ create(:merge_request, source_branch: 'extra2')
+
+ expect(service.execute).to contain_exactly('branch1', 'branch2')
+ end
+ end
+
+ context 'when tags pushed' do
+ let(:pushed_branches) do
+ %w(v10.0.0 v11.0.2 v12.1.0).map do |branch|
+ { ref: "refs/tags/#{branch}" }
+ end
+ end
+
+ it 'returns an empty result without performing any SQL queries' do
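+ # QueryRecorder counts the SQL statements executed inside the block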
+ control_count = ActiveRecord::QueryRecorder.new do
+ expect(service.execute).to be_empty
+ end.count
+
+ expect(control_count).to be_zero
+ end
+ end
+end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index dd5d90b2d07..8c1800c495f 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -208,7 +208,7 @@ describe MergeRequests::UpdateService, :mailer do
end
end
- context 'merge' do
+ shared_examples_for 'correct merge behavior' do
let(:opts) do
{
merge: merge_request.diff_head_sha
@@ -311,6 +311,18 @@ describe MergeRequests::UpdateService, :mailer do
end
end
+ describe 'merge' do
+ it_behaves_like 'correct merge behavior'
+
+ context 'when merge_orchestration_service feature flag is disabled' do
+ before do
+ stub_feature_flags(merge_orchestration_service: false)
+ end
+
+ it_behaves_like 'correct merge behavior'
+ end
+ end
+
context 'todos' do
let!(:pending_todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) }
diff --git a/spec/services/metrics/dashboard/transient_embed_service_spec.rb b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
index fddfbe15281..4982f56cddc 100644
--- a/spec/services/metrics/dashboard/transient_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
@@ -38,21 +38,7 @@ describe Metrics::Dashboard::TransientEmbedService, :use_clean_rails_memory_stor
end
describe '#get_dashboard' do
- let(:embed_json) do
- {
- panel_groups: [{
- panels: [{
- type: 'line-graph',
- title: 'title',
- y_label: 'y_label',
- metrics: [{
- query_range: 'up',
- label: 'y_label'
- }]
- }]
- }]
- }.to_json
- end
+ let(:embed_json) { get_embed_json }
let(:service_params) { [project, user, { environment: environment, embedded: 'true', embed_json: embed_json }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
@@ -68,5 +54,39 @@ describe Metrics::Dashboard::TransientEmbedService, :use_clean_rails_memory_stor
described_class.new(*service_params).get_dashboard
described_class.new(*service_params).get_dashboard
end
+
+ it 'caches unique requests separately' do
+ alt_embed_json = get_embed_json('area-chart')
+ alt_service_params = [project, user, { environment: environment, embedded: 'true', embed_json: alt_embed_json }]
+
+ embed = described_class.new(*service_params).get_dashboard
+ alt_embed = described_class.new(*alt_service_params).get_dashboard
+
+ expect(embed).not_to eq(alt_embed)
+ expect(get_type_for_embed(embed)).to eq('line-graph')
+ expect(get_type_for_embed(alt_embed)).to eq('area-chart')
+ end
+
+ private
+
+ def get_embed_json(type = 'line-graph')
+ {
+ panel_groups: [{
+ panels: [{
+ type: type,
+ title: 'title',
+ y_label: 'y_label',
+ metrics: [{
+ query_range: 'up',
+ label: 'y_label'
+ }]
+ }]
+ }]
+ }.to_json
+ end
+
+ def get_type_for_embed(embed)
+ embed[:dashboard][:panel_groups][0][:panels][0][:type]
+ end
end
end
diff --git a/spec/services/personal_access_tokens/create_service_spec.rb b/spec/services/personal_access_tokens/create_service_spec.rb
new file mode 100644
index 00000000000..9190434b96a
--- /dev/null
+++ b/spec/services/personal_access_tokens/create_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PersonalAccessTokens::CreateService do
+ describe '#execute' do
+ context 'with valid params' do
+ it 'creates personal access token record' do
+ user = create(:user)
+ params = { name: 'Test token', impersonation: true, scopes: [:api], expires_at: Date.today + 1.month }
+
+ response = described_class.new(user, params).execute
+ personal_access_token = response.payload[:personal_access_token]
+
+ expect(response.success?).to be true
+ expect(personal_access_token.name).to eq(params[:name])
+ expect(personal_access_token.impersonation).to eq(params[:impersonation])
+ expect(personal_access_token.scopes).to eq(params[:scopes])
+ expect(personal_access_token.expires_at).to eq(params[:expires_at])
+ expect(personal_access_token.user).to eq(user)
+ end
+ end
+ end
+end
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
index fb53321352b..3ec5dc68c60 100644
--- a/spec/services/pod_logs/base_service_spec.rb
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -13,10 +13,16 @@ describe ::PodLogs::BaseService do
let(:container_name) { 'container-0' }
let(:params) { {} }
let(:raw_pods) do
- JSON.parse([
- kube_pod(name: pod_name),
- kube_pod(name: pod_name_2)
- ].to_json, object_class: OpenStruct)
+ [
+ {
+ name: pod_name,
+ container_names: %w(container-0-0 container-0-1)
+ },
+ {
+ name: pod_name_2,
+ container_names: %w(container-1-0 container-1-1)
+ }
+ ]
end
subject { described_class.new(cluster, namespace, params: params) }
@@ -99,19 +105,6 @@ describe ::PodLogs::BaseService do
end
end
- describe '#get_raw_pods' do
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- it 'returns success with passthrough k8s response' do
- stub_kubeclient_pods(namespace)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
- end
- end
-
describe '#get_pod_names' do
it 'returns success with a list of pods' do
result = subject.send(:get_pod_names, raw_pods: raw_pods)
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index 39aa910d878..e3efce1134b 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -21,8 +21,63 @@ describe ::PodLogs::ElasticsearchService do
]
end
+ let(:raw_pods) do
+ [
+ {
+ name: pod_name,
+ container_names: [container_name, "#{container_name}-1"]
+ }
+ ]
+ end
+
subject { described_class.new(cluster, namespace, params: params) }
+ describe '#get_raw_pods' do
+ before do
+ create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ end
+
+ it 'returns success with elasticsearch response' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
+ .to receive(:pods)
+ .with(namespace)
+ .and_return(raw_pods)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods]).to eq(raw_pods)
+ end
+
+ it 'returns an error when ES is unreachable' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(nil)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to connect to Elasticsearch')
+ end
+
+ it 'handles server errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
+ .to receive(:pods)
+ .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
+ end
+ end
+
describe '#check_times' do
context 'with start and end provided and valid' do
let(:params) do
@@ -168,7 +223,7 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
.and_return({ logs: expected_logs, cursor: expected_cursor })
@@ -195,7 +250,7 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
@@ -209,9 +264,9 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
- .and_raise(::Gitlab::Elasticsearch::Logs::InvalidCursor.new)
+ .and_raise(::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor.new)
result = subject.send(:pod_logs, result_arg)
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
index ff0554bbe5c..da89c7ee117 100644
--- a/spec/services/pod_logs/kubernetes_service_spec.rb
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -20,14 +20,36 @@ describe ::PodLogs::KubernetesService do
end
let(:raw_pods) do
- JSON.parse([
- kube_pod(name: pod_name),
- kube_pod(name: pod_name_2, container_name: container_name_2)
- ].to_json, object_class: OpenStruct)
+ [
+ {
+ name: pod_name,
+ container_names: [container_name, "#{container_name}-1"]
+ },
+ {
+ name: pod_name_2,
+ container_names: [container_name_2, "#{container_name_2}-1"]
+ }
+ ]
end
subject { described_class.new(cluster, namespace, params: params) }
+ describe '#get_raw_pods' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns success with passthrough k8s response' do
+ stub_kubeclient_pods(namespace)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods]).to eq([{
+ name: 'kube-pod',
+ container_names: %w(container-0 container-0-1)
+ }])
+ end
+ end
+
describe '#pod_logs' do
let(:result_arg) do
{
@@ -233,7 +255,7 @@ describe ::PodLogs::KubernetesService do
end
it 'returns error if container_name was not specified and there are no containers on the pod' do
- raw_pods.first.spec.containers = []
+ raw_pods.first[:container_names] = []
result = subject.send(:check_container_name,
pod_name: pod_name,
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 6cc2e2b6abe..36f9966c0ef 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -492,7 +492,7 @@ describe QuickActions::InterpretService do
end
end
- shared_examples 'merge command' do
+ shared_examples 'merge immediately command' do
let(:project) { create(:project, :repository) }
it 'runs merge command if content contains /merge' do
@@ -504,7 +504,18 @@ describe QuickActions::InterpretService do
it 'returns them merge message' do
_, _, message = service.execute(content, issuable)
- expect(message).to eq('Scheduled to merge this merge request when the pipeline succeeds.')
+ expect(message).to eq('Merged this merge request.')
+ end
+ end
+
+ shared_examples 'merge automatically command' do
+ let(:project) { create(:project, :repository) }
+
+ it 'runs merge command if content contains /merge and returns merge message' do
+ _, updates, message = service.execute(content, issuable)
+
+ expect(updates).to eq(merge: merge_request.diff_head_sha)
+ expect(message).to eq('Scheduled to merge this merge request (Merge when pipeline succeeds).')
end
end
@@ -675,11 +686,23 @@ describe QuickActions::InterpretService do
context 'merge command' do
let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: merge_request.diff_head_sha }) }
- it_behaves_like 'merge command' do
+ it_behaves_like 'merge immediately command' do
let(:content) { '/merge' }
let(:issuable) { merge_request }
end
+ context 'when the head pipeline of merge request is running' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it_behaves_like 'merge automatically command' do
+ let(:content) { '/merge' }
+ let(:issuable) { merge_request }
+ end
+ end
+
context 'can not be merged when logged user does not have permissions' do
let(:service) { described_class.new(project, create(:user)) }
diff --git a/spec/services/resources/create_access_token_service_spec.rb b/spec/services/resources/create_access_token_service_spec.rb
new file mode 100644
index 00000000000..8c108d9937a
--- /dev/null
+++ b/spec/services/resources/create_access_token_service_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resources::CreateAccessTokenService do
+ subject { described_class.new(resource_type, resource, user, params).execute }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:params) { {} }
+
+ describe '#execute' do
+ # Written as shared_examples so it will be easy to include specs for group bots in https://gitlab.com/gitlab-org/gitlab/-/issues/214046
+ shared_examples 'fails when user does not have the permission to create a Resource Bot' do
+ before do
+ resource.add_developer(user)
+ end
+
+ it 'returns error' do
+ response = subject
+
+ expect(response.error?).to be true
+ expect(response.message).to eq("User does not have permission to create #{resource_type} Access Token")
+ end
+ end
+
+ shared_examples 'fails when flag is disabled' do
+ before do
+ stub_feature_flags(resource_access_token: false)
+ end
+
+ it 'returns nil' do
+ expect(subject).to be nil
+ end
+ end
+
+ shared_examples 'allows creation of bot with valid params' do
+ it { expect { subject }.to change { User.count }.by(1) }
+
+ it 'creates resource bot user' do
+ response = subject
+
+ access_token = response.payload[:access_token]
+
+ expect(access_token.user.reload.user_type).to eq("#{resource_type}_bot")
+ end
+
+ context 'bot name' do
+ context 'when no value is passed' do
+ it 'uses default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.user.name).to eq("#{resource.name.to_s.humanize} bot")
+ end
+ end
+
+ context 'when user provides value' do
+ let(:params) { { name: 'Random bot' } }
+
+ it 'overrides the default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.user.name).to eq(params[:name])
+ end
+ end
+ end
+
+ it 'adds the bot user as a maintainer in the resource' do
+ response = subject
+ access_token = response.payload[:access_token]
+ bot_user = access_token.user
+
+ expect(resource.members.maintainers.map(&:user_id)).to include(bot_user.id)
+ end
+
+ context 'personal access token' do
+ it { expect { subject }.to change { PersonalAccessToken.count }.by(1) }
+
+ context 'when user does not provide scope' do
+ it 'has default scopes' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.scopes).to eq(Gitlab::Auth::API_SCOPES + Gitlab::Auth::REPOSITORY_SCOPES + Gitlab::Auth.registry_scopes - [:read_user])
+ end
+ end
+
+ context 'when user provides scope explicitly' do
+ let(:params) { { scopes: Gitlab::Auth::REPOSITORY_SCOPES } }
+
+ it 'overrides the default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.scopes).to eq(Gitlab::Auth::REPOSITORY_SCOPES)
+ end
+ end
+
+ context 'expires_at' do
+ context 'when no value is passed' do
+ it 'uses default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.expires_at).to eq(nil)
+ end
+ end
+
+ context 'when user provides value' do
+ let(:params) { { expires_at: Date.today + 1.month } }
+
+ it 'overrides the default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.expires_at).to eq(params[:expires_at])
+ end
+ end
+
+ context 'when invalid scope is passed' do
+ let(:params) { { scopes: [:invalid_scope] } }
+
+ it 'returns error' do
+ response = subject
+
+ expect(response.error?).to be true
+ end
+ end
+ end
+ end
+
+ context 'when access provisioning fails' do
+ before do
+ allow(resource).to receive(:add_maintainer).and_return(nil)
+ end
+
+ it 'returns error' do
+ response = subject
+
+ expect(response.error?).to be true
+ end
+ end
+ end
+
+ context 'when resource is a project' do
+ let(:resource_type) { 'project' }
+ let(:resource) { project }
+
+ it_behaves_like 'fails when user does not have the permission to create a Resource Bot'
+ it_behaves_like 'fails when flag is disabled'
+
+ context 'user with valid permission' do
+ before do
+ resource.add_maintainer(user)
+ end
+
+ it_behaves_like 'allows creation of bot with valid params'
+ end
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 690aa2c066e..c1a8a026b90 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -252,6 +252,39 @@ describe Snippets::CreateService do
end
end
+ shared_examples 'after_save callback to store_mentions' do
+ context 'when mentionable attributes change' do
+ let(:extra_opts) { { description: "Description with #{user.to_reference}" } }
+
+ it 'saves mentions' do
+ expect_next_instance_of(Snippet) do |instance|
+ expect(instance).to receive(:store_mentions!).and_call_original
+ end
+ expect(snippet.user_mentions.count).to eq 1
+ end
+ end
+
+ context 'when mentionable attributes do not change' do
+ it 'does not call store_mentions' do
+ expect_next_instance_of(Snippet) do |instance|
+ expect(instance).not_to receive(:store_mentions!)
+ end
+ expect(snippet.user_mentions.count).to eq 0
+ end
+ end
+
+ context 'when save fails' do
+ it 'does not call store_mentions' do
+ base_opts.delete(:title)
+
+ expect_next_instance_of(Snippet) do |instance|
+ expect(instance).not_to receive(:store_mentions!)
+ end
+ expect(snippet.valid?).to be false
+ end
+ end
+ end
+
context 'when ProjectSnippet' do
let_it_be(:project) { create(:project) }
@@ -265,6 +298,7 @@ describe Snippets::CreateService do
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
+ it_behaves_like 'after_save callback to store_mentions'
end
context 'when PersonalSnippet' do
@@ -276,6 +310,9 @@ describe Snippets::CreateService do
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
+ pending('See https://gitlab.com/gitlab-org/gitlab/issues/30742') do
+ it_behaves_like 'after_save callback to store_mentions'
+ end
end
end
end
diff --git a/spec/services/terraform/remote_state_handler_spec.rb b/spec/services/terraform/remote_state_handler_spec.rb
new file mode 100644
index 00000000000..f4e1831b2e8
--- /dev/null
+++ b/spec/services/terraform/remote_state_handler_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Terraform::RemoteStateHandler do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#find_with_lock' do
+ context 'without a state name' do
+ subject { described_class.new(project, user) }
+
+ it 'raises an exception' do
+ expect { subject.find_with_lock }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'with a state name' do
+ subject { described_class.new(project, user, name: 'state') }
+
+ context 'with no matching state' do
+ it 'raises an exception' do
+ expect { subject.find_with_lock }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with a matching state' do
+ let!(:state) { create(:terraform_state, project: project, name: 'state') }
+
+ it 'returns the state' do
+ expect(subject.find_with_lock).to eq(state)
+ end
+ end
+ end
+ end
+
+ describe '#create_or_find!' do
+ it 'requires passing a state name' do
+ handler = described_class.new(project, user)
+
+ expect { handler.create_or_find! }.to raise_error(ArgumentError)
+ end
+
+ it 'allows creating states with the same name in different projects' do
+ project_b = create(:project)
+
+ state_a = described_class.new(project, user, name: 'my-state').create_or_find!
+ state_b = described_class.new(project_b, user, name: 'my-state').create_or_find!
+
+ expect(state_a).to be_persisted
+ expect(state_b).to be_persisted
+ expect(state_a.id).not_to eq state_b.id
+ end
+
+ it 'loads the same state upon subsequent call in the project scope' do
+ state_a = described_class.new(project, user, name: 'my-state').create_or_find!
+ state_b = described_class.new(project, user, name: 'my-state').create_or_find!
+
+ expect(state_a).to be_persisted
+ expect(state_a.id).to eq state_b.id
+ end
+ end
+
+ context 'when state locking is not being used' do
+ subject { described_class.new(project, user, name: 'my-state') }
+
+ describe '#handle_with_lock' do
+ it 'allows modifying a state using database locking' do
+ state = subject.handle_with_lock do |state|
+ state.name = 'updated-name'
+ end
+
+ expect(state.name).to eq 'updated-name'
+ end
+
+ it 'returns the state object itself' do
+ state = subject.create_or_find!
+
+ expect(state.name).to eq 'my-state'
+ end
+ end
+
+ describe '#lock!' do
+ it 'raises an error' do
+ expect { subject.lock! }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ context 'when using locking' do
+ describe '#handle_with_lock' do
+ it 'handles a locked state using exclusive read lock' do
+ handler = described_class
+ .new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ handler.lock!
+
+ state = handler.handle_with_lock do |state|
+ state.name = 'new-name'
+ end
+
+ expect(state.name).to eq 'new-name'
+ end
+ end
+
+ it 'raises an exception if the lock has not been acquired before' do
+ handler = described_class
+ .new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ expect { handler.handle_with_lock }
+ .to raise_error(described_class::StateLockedError)
+ end
+
+ describe '#lock!' do
+ it 'allows locking a state that does not exist yet' do
+ handler = described_class.new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ state = handler.lock!
+
+ expect(state).to be_persisted
+ expect(state.name).to eq 'new-state'
+ end
+
+ it 'allows locking a state that exists and is not locked' do
+ state = described_class.new(project, user, name: 'new-state').create_or_find!
+ handler = described_class.new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ handler.lock!
+
+ expect(state.reload.lock_xid).to eq 'abc-abc'
+ expect(state).to be_locked
+ end
+
+ it 'raises an exception when trying to lock a state locked by someone else' do
+ described_class.new(project, user, name: 'new-state', lock_id: 'abc-abc').lock!
+
+ handler = described_class.new(project, user, name: 'new-state', lock_id: '12a-23f')
+
+ expect { handler.lock! }.to raise_error(described_class::StateLockedError)
+ end
+ end
+ end
+end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index 146819c7f44..7588be833ae 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -157,6 +157,26 @@ describe Users::BuildService do
end
end
+ context 'when user_type is provided' do
+ subject(:user) { service.execute }
+
+ context 'when project_bot' do
+ before do
+ params.merge!({ user_type: :project_bot })
+ end
+
+ it { expect(user.project_bot?).to be true }
+ end
+
+ context 'when not a project_bot' do
+ before do
+ params.merge!({ user_type: :alert_bot })
+ end
+
+ it { expect(user.user_type).to be nil }
+ end
+ end
+
context 'with "user_default_external" application setting' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/services/x509_certificate_revoke_service_spec.rb b/spec/services/x509_certificate_revoke_service_spec.rb
index ef76f616c93..c2b2576904c 100644
--- a/spec/services/x509_certificate_revoke_service_spec.rb
+++ b/spec/services/x509_certificate_revoke_service_spec.rb
@@ -24,8 +24,6 @@ describe X509CertificateRevokeService do
end
context 'for good certificates' do
- RSpec::Matchers.define_negated_matcher :not_change, :change
-
let(:x509_certificate) { create(:x509_certificate) }
it 'do not update any commit signature' do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 19d12a0f5cb..fe03621b9bf 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -136,6 +136,7 @@ RSpec.configure do |config|
config.include ExpectRequestWithStatus, type: :request
config.include IdempotentWorkerHelper, type: :worker
config.include RailsHelpers
+ config.include SidekiqMiddleware
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
@@ -299,6 +300,22 @@ RSpec.configure do |config|
Labkit::Context.with_context { example.run }
end
+ config.around do |example|
+ with_sidekiq_server_middleware do |chain|
+ Gitlab::SidekiqMiddleware.server_configurator(
+ metrics: false, # The metrics don't go anywhere in tests
+ arguments_logger: false, # We're not logging the regular messages for inline jobs
+ memory_killer: false, # This is not a thing we want to do inline in tests
+ # Don't enable this if the request store is active in the spec itself
+ # This needs to run within the `request_store` around block defined above
+ request_store: !RequestStore.active?
+ ).call(chain)
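+ # Lift query-count limits for jobs run inline in specs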
+ chain.add DisableQueryLimit
+
+ example.run
+ end
+ end
+
config.after do
Fog.unmock! if Fog.mock?
Gitlab::CurrentSettings.clear_in_memory_application_settings!
diff --git a/spec/support/helpers/api_helpers.rb b/spec/support/helpers/api_helpers.rb
index b1e6078c4f2..eb9594a4fb6 100644
--- a/spec/support/helpers/api_helpers.rb
+++ b/spec/support/helpers/api_helpers.rb
@@ -40,6 +40,17 @@ module ApiHelpers
end
end
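+ # Returns a Basic auth header for the given user, creating a personal access token on the fly; non-user values are passed through as the raw header value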
+ def basic_auth_header(user = nil)
+ return { 'HTTP_AUTHORIZATION' => user } unless user.respond_to?(:username)
+
+ {
+ 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(
+ user.username,
+ create(:personal_access_token, user: user).token
+ )
+ }
+ end
+
def expect_empty_array_response
expect_successful_response_with_paginated_array
expect(json_response.length).to eq(0)
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index 5eb70f534d8..2c31a608b35 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -80,6 +80,9 @@ module MigrationsHelpers
allow(ActiveRecord::Base.connection)
.to receive(:active?)
.and_return(false)
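+ # Behave as if running from a Rake task (Gitlab::Runtime.rake? => true)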
+ allow(Gitlab::Runtime)
+ .to receive(:rake?)
+ .and_return(true)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
diff --git a/spec/support/import_export/configuration_helper.rb b/spec/support/import_export/configuration_helper.rb
index 4fe619225bb..4330c4314a8 100644
--- a/spec/support/import_export/configuration_helper.rb
+++ b/spec/support/import_export/configuration_helper.rb
@@ -44,8 +44,8 @@ module ConfigurationHelper
import_export_config = config_hash(config)
excluded_attributes = import_export_config[:excluded_attributes][relation_name.to_sym]
included_attributes = import_export_config[:included_attributes][relation_name.to_sym]
- attributes = attributes - JSON[excluded_attributes.to_json] if excluded_attributes
- attributes = attributes & JSON[included_attributes.to_json] if included_attributes
+ attributes = attributes - JSON.parse(excluded_attributes.to_json) if excluded_attributes
+ attributes = attributes & JSON.parse(included_attributes.to_json) if included_attributes
attributes
end
diff --git a/spec/support/matchers/exclude_matcher.rb b/spec/support/matchers/exclude_matcher.rb
new file mode 100644
index 00000000000..29ee251a466
--- /dev/null
+++ b/spec/support/matchers/exclude_matcher.rb
@@ -0,0 +1,3 @@
+# frozen_string_literal: true
+
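+# Usage: expect(collection).to exclude(item), i.e. the negation of the `include` matcher.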
+RSpec::Matchers.define_negated_matcher :exclude, :include
diff --git a/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb b/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
index 791eb0b68e0..bd4eeff81a0 100644
--- a/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
RSpec.shared_examples 'a created deploy token' do
+ let(:read_repository) { '1' }
let(:deploy_token_params) do
{
name: 'deployer_token',
expires_at: 1.month.from_now.to_date.to_s,
username: 'deployer',
- read_repository: '1',
+ read_repository: read_repository,
deploy_token_type: deploy_token_type
}
end
@@ -19,4 +20,15 @@ RSpec.shared_examples 'a created deploy token' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
+
+ context 'when no scope is selected' do
+ let(:read_repository) { '0' }
+
+ it 'creates a variable with an errored deploy token' do
+ expect { create_deploy_token }.not_to change { DeployToken.active.count }
+
+ expect(assigns(:new_deploy_token)).to be_a(DeployToken)
+ expect(assigns(:new_deploy_token).errors.full_messages.first).to eq('Scopes can\'t be blank')
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb b/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
index 38a9f1fe098..aa8979603b6 100644
--- a/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
+++ b/spec/support/shared_examples/models/diff_positionable_note_shared_examples.rb
@@ -13,6 +13,7 @@ RSpec.shared_examples 'a valid diff positionable note' do |factory_on_commit|
new_path: "files/ruby/popen.rb",
old_line: nil,
new_line: 14,
+ line_range: nil,
diff_refs: diff_refs
)
end
diff --git a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
index fa163b54405..e0edbc5637a 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
@@ -10,10 +10,27 @@ RSpec.shared_examples 'merge quick action' do
it 'merges the MR', :sidekiq_might_not_need_inline do
add_note("/merge")
- expect(page).to have_content 'Scheduled to merge this merge request when the pipeline succeeds.'
+ expect(page).to have_content 'Merged this merge request.'
expect(merge_request.reload).to be_merged
end
+
+ context 'when auto merge is available' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline,
+ project: project, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it 'schedules to merge the MR' do
+ add_note("/merge")
+
+ expect(page).to have_content "Scheduled to merge this merge request (Merge when pipeline succeeds)."
+
+ expect(merge_request.reload).to be_auto_merge_enabled
+ expect(merge_request.reload).not_to be_merged
+ end
+ end
end
context 'when the head diff changes in the meanwhile' do
diff --git a/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
index 583475678f1..3d25b9076ad 100644
--- a/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/diff_discussions_shared_examples.rb
@@ -22,12 +22,18 @@ RSpec.shared_examples 'diff discussions API' do |parent_type, noteable_type, id_
expect(json_response['id']).to eq(diff_note.discussion_id)
expect(json_response['notes'].first['body']).to eq(diff_note.note)
expect(json_response['notes'].first['position']).to eq(diff_note.position.to_h.stringify_keys)
+ expect(json_response['notes'].first['line_range']).to eq(nil)
end
end
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "creates a new diff note" do
- position = diff_note.position.to_h
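+ # Attach a line_range (start and end line codes) to the position so the note
+ # is created as a multi-line diff note.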
+ line_range = {
+ "start_line_code" => Gitlab::Git.diff_line_code(diff_note.position.file_path, 1, 1),
+ "end_line_code" => Gitlab::Git.diff_line_code(diff_note.position.file_path, 2, 2)
+ }
+
+ position = diff_note.position.to_h.merge({ line_range: line_range })
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user),
params: { body: 'hi!', position: position }
diff --git a/spec/support/sidekiq_middleware.rb b/spec/support/sidekiq_middleware.rb
index f6694713101..1380f4394d8 100644
--- a/spec/support/sidekiq_middleware.rb
+++ b/spec/support/sidekiq_middleware.rb
@@ -2,6 +2,17 @@
require 'sidekiq/testing'
+# rubocop:disable RSpec/ModifySidekiqMiddleware
+module SidekiqMiddleware
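+ # Clears any globally registered Sidekiq server middleware, yields the chain so the
+ # example can configure its own, and clears it again afterwards so middleware does
+ # not leak between examples.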
+ def with_sidekiq_server_middleware(&block)
+ Sidekiq::Testing.server_middleware.clear
+ Sidekiq::Testing.server_middleware(&block)
+ ensure
+ Sidekiq::Testing.server_middleware.clear
+ end
+end
+# rubocop:enable RSpec/ModifySidekiqMiddleware
+
# If Sidekiq::Testing.inline! is used, SQL transactions done inside
# Sidekiq worker are included in the SQL query limit (in a real
# deployment sidekiq worker is executed separately). To avoid
@@ -20,8 +31,3 @@ class DisableQueryLimit
end
end
end
-
-Sidekiq::Testing.server_middleware do |chain|
- chain.add Gitlab::SidekiqStatus::ServerMiddleware
- chain.add DisableQueryLimit
-end
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 140595e58ad..71eff23c77c 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -78,8 +78,7 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: build_stubbed(:user),
- uploader: uploader.class.to_s,
- store: ::ObjectStorage::Store::LOCAL
+ uploader: uploader.class.to_s
)
uploader.upload = existing
@@ -99,8 +98,7 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: project,
- uploader: uploader.class.to_s,
- store: ::ObjectStorage::Store::LOCAL
+ uploader: uploader.class.to_s
)
uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/uploaders/terraform/state_uploader_spec.rb b/spec/uploaders/terraform/state_uploader_spec.rb
index 4577a2c4738..cbcb6298eca 100644
--- a/spec/uploaders/terraform/state_uploader_spec.rb
+++ b/spec/uploaders/terraform/state_uploader_spec.rb
@@ -5,15 +5,15 @@ require 'spec_helper'
describe Terraform::StateUploader do
subject { terraform_state.file }
- let(:terraform_state) { create(:terraform_state, file: fixture_file_upload('spec/fixtures/terraform/terraform.tfstate')) }
+ let(:terraform_state) { create(:terraform_state, :with_file) }
before do
stub_terraform_state_object_storage
end
describe '#filename' do
- it 'contains the ID of the terraform state record' do
- expect(subject.filename).to include(terraform_state.id.to_s)
+ it 'contains the UUID of the terraform state record' do
+ expect(subject.filename).to include(terraform_state.uuid)
end
end
diff --git a/spec/views/shared/projects/_project.html.haml_spec.rb b/spec/views/shared/projects/_project.html.haml_spec.rb
index b123be42074..8c3b8768469 100644
--- a/spec/views/shared/projects/_project.html.haml_spec.rb
+++ b/spec/views/shared/projects/_project.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'shared/projects/_project.html.haml' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
before do
allow(view).to receive(:current_application_settings).and_return(Gitlab::CurrentSettings.current_application_settings)
diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb
index ea3b7bad2e1..0cea67bf116 100644
--- a/spec/workers/concerns/cronjob_queue_spec.rb
+++ b/spec/workers/concerns/cronjob_queue_spec.rb
@@ -14,6 +14,10 @@ describe CronjobQueue do
end
end
+ before do
+ stub_const("DummyWorker", worker)
+ end
+
it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob:dummy')
end
@@ -29,4 +33,22 @@ describe CronjobQueue do
expect(worker_context[:root_namespace]).to be_nil
expect(worker_context[:project]).to be_nil
end
+
+ it 'gets scheduled with caller_id set to Cronjob' do
+ worker.perform_async
+
+ job = worker.jobs.last
+
+ expect(job).to include('meta.caller_id' => 'Cronjob')
+ end
+
+ it 'does not set the caller_id if there was already one in the context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'already set') do
+ worker.perform_async
+ end
+
+ job = worker.jobs.last
+
+ expect(job).to include('meta.caller_id' => 'already set')
+ end
end
diff --git a/spec/workers/create_commit_signature_worker_spec.rb b/spec/workers/create_commit_signature_worker_spec.rb
index d7235fcd907..f40482f2361 100644
--- a/spec/workers/create_commit_signature_worker_spec.rb
+++ b/spec/workers/create_commit_signature_worker_spec.rb
@@ -9,14 +9,14 @@ describe CreateCommitSignatureWorker do
let(:gpg_commit) { instance_double(Gitlab::Gpg::Commit) }
let(:x509_commit) { instance_double(Gitlab::X509::Commit) }
- context 'when a signature is found' do
- before do
- allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
- allow(project).to receive(:commits_by).with(oids: commit_shas).and_return(commits)
- end
+ before do
+ allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
+ allow(project).to receive(:commits_by).with(oids: commit_shas).and_return(commits)
+ end
- subject { described_class.new.perform(commit_shas, project.id) }
+ subject { described_class.new.perform(commit_shas, project.id) }
+ context 'when a signature is found' do
it 'calls Gitlab::Gpg::Commit#signature' do
commits.each do |commit|
allow(commit).to receive(:signature_type).and_return(:PGP)
@@ -67,9 +67,10 @@ describe CreateCommitSignatureWorker do
end
context 'handles when a string is passed in for the commit SHA' do
+ let(:commit_shas) { super().first }
+
before do
- allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
- allow(project).to receive(:commits_by).with(oids: Array(commit_shas.first)).and_return(commits)
+ allow(project).to receive(:commits_by).with(oids: [commit_shas]).and_return(commits)
allow(commits.first).to receive(:signature_type).and_return(:PGP)
end
@@ -78,35 +79,65 @@ describe CreateCommitSignatureWorker do
expect(gpg_commit).to receive(:signature).once
- described_class.new.perform(commit_shas.first, project.id)
+ subject
end
end
context 'when Commit is not found' do
let(:nonexisting_commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a34' }
+ let(:commit_shas) { [nonexisting_commit_sha] }
it 'does not raise errors' do
- expect { described_class.new.perform([nonexisting_commit_sha], project.id) }.not_to raise_error
+ expect { described_class.new.perform(commit_shas, project.id) }.not_to raise_error
end
end
context 'when Project is not found' do
- let(:nonexisting_project_id) { -1 }
+ let(:commits) { [] }
+ let(:project) { double(id: non_existing_record_id) }
it 'does not raise errors' do
- expect { described_class.new.perform(commit_shas, nonexisting_project_id) }.not_to raise_error
+ expect { subject }.not_to raise_error
end
it 'does not call Gitlab::Gpg::Commit#signature' do
expect_any_instance_of(Gitlab::Gpg::Commit).not_to receive(:signature)
- described_class.new.perform(commit_shas, nonexisting_project_id)
+ subject
end
it 'does not call Gitlab::X509::Commit#signature' do
expect_any_instance_of(Gitlab::X509::Commit).not_to receive(:signature)
- described_class.new.perform(commit_shas, nonexisting_project_id)
+ subject
+ end
+ end
+
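+ # Existing signatures should be looked up with a single by_commit_sha query per
+ # signature type.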
+ context 'fetching signatures' do
+ before do
+ commits.each do |commit|
+ allow(commit).to receive(:signature_type).and_return(type)
+ end
+ end
+
+ context 'X509' do
+ let(:type) { :X509 }
+
+ it 'performs a single query for commit signatures' do
+ expect(X509CommitSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([])
+
+ subject
+ end
+ end
+
+ context 'PGP' do
+ let(:type) { :PGP }
+
+ it 'performs a single query for commit signatures' do
+ expect(GpgSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([])
+
+ subject
+ end
end
end
end
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index 8d898ffc13e..61ea22fbd32 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -11,7 +11,9 @@ describe ExpirePipelineCacheWorker do
describe '#perform' do
it 'executes the service' do
- expect_any_instance_of(Ci::ExpirePipelineCacheService).to receive(:execute).with(pipeline).and_call_original
+ expect_next_instance_of(Ci::ExpirePipelineCacheService) do |instance|
+ expect(instance).to receive(:execute).with(pipeline).and_call_original
+ end
subject.perform(pipeline.id)
end
@@ -31,5 +33,9 @@ describe ExpirePipelineCacheWorker do
subject.perform(pipeline.id)
end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [pipeline.id] }
+ end
end
end
diff --git a/spec/workers/export_csv_worker_spec.rb b/spec/workers/export_csv_worker_spec.rb
new file mode 100644
index 00000000000..87285b6264a
--- /dev/null
+++ b/spec/workers/export_csv_worker_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ExportCsvWorker do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, creator: user) }
+
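+ # Runs the worker inline for the spec's user and project with the given finder params.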
+ def perform(params = {})
+ described_class.new.perform(user.id, project.id, params)
+ end
+
+ it 'emails a CSV' do
+ expect { perform }.to change(ActionMailer::Base.deliveries, :size).by(1)
+ end
+
+ it 'ensures that project_id is passed to IssuesFinder' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_including(project_id: project.id)).and_call_original
+
+ perform
+ end
+
+ it 'removes sort parameter' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_not_including(:sort)).and_call_original
+
+ perform
+ end
+
+ it 'converts controller string keys to symbol keys for IssuesFinder' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_including(test_key: true)).and_call_original
+
+ perform('test_key' => true)
+ end
+end
diff --git a/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb
index 93e2a44223b..5c3c7dcccc1 100644
--- a/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb
@@ -20,7 +20,7 @@ describe Gitlab::JiraImport::Stage::FinishImportWorker do
end
context 'when feature flag enabled' do
- let_it_be(:jira_import) { create(:jira_import_state, :scheduled, project: project) }
+ let_it_be(:jira_import, reload: true) { create(:jira_import_state, :scheduled, project: project) }
before do
stub_feature_flags(jira_issue_import: true)
@@ -31,15 +31,27 @@ describe Gitlab::JiraImport::Stage::FinishImportWorker do
end
context 'when import started' do
+ let_it_be(:import_label) { create(:label, project: project, title: 'jira-import') }
+ let_it_be(:imported_issues) { create_list(:labeled_issue, 3, project: project, labels: [import_label]) }
+
before do
+ expect(Gitlab::JiraImport).to receive(:get_import_label_id).and_return(import_label.id)
+ expect(Gitlab::JiraImport).to receive(:issue_failures).and_return(2)
+
jira_import.start!
+ worker.perform(project.id)
end
it 'changes import state to finished' do
- worker.perform(project.id)
-
expect(project.jira_import_status).to eq('finished')
end
+
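+ # total_issue_count is expected to be 3 imported issues (created above) plus 2 stubbed failures.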
+ it 'saves imported issue counts' do
+ latest_jira_import = project.latest_jira_import
+ expect(latest_jira_import.total_issue_count).to eq(5)
+ expect(latest_jira_import.failed_to_import_count).to eq(2)
+ expect(latest_jira_import.imported_issues_count).to eq(3)
+ end
end
end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index a51e0b79075..3d24b5f753a 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -352,6 +352,9 @@ describe PostReceive do
it "enqueues a UpdateMergeRequestsWorker job" do
allow(Project).to receive(:find_by).and_return(project)
+ expect_next_instance_of(MergeRequests::PushedBranchesService) do |service|
+ expect(service).to receive(:execute).and_return(%w(tést))
+ end
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(project.id, project.owner.id, any_args)