summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-02-01 12:10:48 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2023-02-01 12:10:48 +0000
commita21091270d45530468f8ac2f4f926fe1b9840b67 (patch)
treea641a030521f16e320f1d3559a49956f485c217f
parent8700fc108e2c269a4d73530d60662a6aaff14381 (diff)
downloadgitlab-ce-a21091270d45530468f8ac2f4f926fe1b9840b67.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.gitlab/ci/rails.gitlab-ci.yml2
-rw-r--r--.gitlab/ci/setup.gitlab-ci.yml6
-rw-r--r--app/assets/javascripts/pages/projects/find_file/ref_switcher/index.js38
-rw-r--r--app/assets/javascripts/pages/projects/find_file/ref_switcher/ref_switcher_utils.js28
-rw-r--r--app/assets/javascripts/pages/projects/find_file/show/index.js2
-rw-r--r--app/assets/javascripts/work_items/components/work_item_comment_form.vue4
-rw-r--r--app/assets/javascripts/work_items/components/work_item_notes.vue20
-rw-r--r--app/assets/javascripts/work_items/graphql/cache_utils.js14
-rw-r--r--app/graphql/types/ci/runner_type.rb15
-rw-r--r--app/models/ci/runner.rb4
-rw-r--r--app/policies/ci/runner_policy.rb6
-rw-r--r--app/views/projects/find_file/show.html.haml2
-rw-r--r--config/feature_flags/development/gitlab_metrics_error_rate_sli.yml8
-rw-r--r--config/initializers/00_deprecations.rb4
-rw-r--r--doc/api/graphql/reference/index.md1
-rw-r--r--doc/subscriptions/gitlab_dedicated/index.md6
-rw-r--r--doc/user/project/deploy_tokens/index.md4
-rw-r--r--lib/gitlab/metrics/rails_slis.rb8
-rw-r--r--lib/gitlab/metrics/requests_rack_middleware.rb2
-rw-r--r--locale/gitlab.pot6
-rw-r--r--scripts/api/default_options.rb3
-rw-r--r--scripts/api/find_issues.rb29
-rwxr-xr-xscripts/failed_tests.rb89
-rwxr-xr-xscripts/pipeline_test_report_builder.rb125
-rw-r--r--scripts/rspec_helpers.sh16
-rw-r--r--scripts/utils.sh4
-rw-r--r--spec/factories/projects.rb4
-rw-r--r--spec/features/projects/files/user_find_file_spec.rb39
-rw-r--r--spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js39
-rw-r--r--spec/graphql/types/ci/runner_type_spec.rb1
-rw-r--r--spec/lib/gitlab/metrics/rails_slis_spec.rb10
-rw-r--r--spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb23
-rw-r--r--spec/models/ci/build_spec.rb290
-rw-r--r--spec/models/ci/runner_spec.rb18
-rw-r--r--spec/policies/ci/runner_policy_spec.rb23
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb103
-rw-r--r--spec/scripts/failed_tests_spec.rb200
-rw-r--r--spec/scripts/pipeline_test_report_builder_spec.rb191
-rw-r--r--spec/tooling/lib/tooling/mappings/base_spec.rb44
-rw-r--r--spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb169
-rw-r--r--spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb (renamed from spec/tooling/lib/tooling/view_to_js_mappings_spec.rb)49
-rwxr-xr-xtooling/bin/js_to_system_specs_mappings10
-rwxr-xr-xtooling/bin/view_to_js_mappings4
-rw-r--r--tooling/lib/tooling/mappings/base.rb30
-rw-r--r--tooling/lib/tooling/mappings/js_to_system_specs_mappings.rb61
-rw-r--r--tooling/lib/tooling/mappings/view_to_js_mappings.rb74
-rw-r--r--tooling/lib/tooling/view_to_js_mappings.rb77
-rw-r--r--workhorse/.tool-versions2
48 files changed, 1271 insertions, 636 deletions
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index 6de0ad42235..4b99d108c07 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -886,7 +886,7 @@ rspec rspec-pg12-rerun-previous-failed-tests:
- .rspec-base-pg12
- .base-rspec-pg12-rerun-previous-failed-tests
variables:
- PREVIOUS_FAILED_TESTS_FILE: tmp/previous_failed_tests/rspec_failed_files.txt
+ PREVIOUS_FAILED_TESTS_FILE: tmp/previous_failed_tests/rspec_failed_tests.txt
rspec rspec-ee-pg12-rerun-previous-failed-tests:
extends:
diff --git a/.gitlab/ci/setup.gitlab-ci.yml b/.gitlab/ci/setup.gitlab-ci.yml
index f9ef0f7ad49..298d5c4ae08 100644
--- a/.gitlab/ci/setup.gitlab-ci.yml
+++ b/.gitlab/ci/setup.gitlab-ci.yml
@@ -128,6 +128,7 @@ detect-tests:
- source ./scripts/rspec_helpers.sh
- install_gitlab_gem
- install_tff_gem
+ - install_activesupport_gem
- retrieve_tests_mapping
- retrieve_frontend_fixtures_mapping
- |
@@ -135,6 +136,7 @@ detect-tests:
mkdir -p $(dirname "$RSPEC_CHANGED_FILES_PATH")
tooling/bin/find_changes ${RSPEC_CHANGED_FILES_PATH};
tooling/bin/find_tests ${RSPEC_CHANGED_FILES_PATH} ${RSPEC_MATCHING_TESTS_PATH};
+ tooling/bin/js_to_system_specs_mappings ${RSPEC_CHANGED_FILES_PATH} ${RSPEC_MATCHING_TESTS_PATH};
tooling/bin/find_changes ${RSPEC_CHANGED_FILES_PATH} ${RSPEC_MATCHING_TESTS_PATH} ${FRONTEND_FIXTURES_MAPPING_PATH};
filter_rspec_matched_foss_tests ${RSPEC_MATCHING_TESTS_PATH} ${RSPEC_MATCHING_TESTS_FOSS_PATH};
filter_rspec_matched_ee_tests ${RSPEC_MATCHING_TESTS_PATH} ${RSPEC_MATCHING_TESTS_EE_PATH};
@@ -160,12 +162,10 @@ detect-previous-failed-tests:
- .rails:rules:detect-previous-failed-tests
variables:
PREVIOUS_FAILED_TESTS_DIR: tmp/previous_failed_tests/
- RSPEC_PG_REGEX: /rspec .+ pg12( .+)?/
- RSPEC_EE_PG_REGEX: /rspec-ee .+ pg12( .+)?/
script:
- source ./scripts/utils.sh
- source ./scripts/rspec_helpers.sh
- - retrieve_previous_failed_tests ${PREVIOUS_FAILED_TESTS_DIR} "${RSPEC_PG_REGEX}" "${RSPEC_EE_PG_REGEX}"
+ - retrieve_failed_tests "${PREVIOUS_FAILED_TESTS_DIR}" "oneline" "previous"
artifacts:
expire_in: 7d
paths:
diff --git a/app/assets/javascripts/pages/projects/find_file/ref_switcher/index.js b/app/assets/javascripts/pages/projects/find_file/ref_switcher/index.js
new file mode 100644
index 00000000000..9a3bb25de70
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/find_file/ref_switcher/index.js
@@ -0,0 +1,38 @@
+import Vue from 'vue';
+import { s__ } from '~/locale';
+import Translate from '~/vue_shared/translate';
+import RefSelector from '~/ref/components/ref_selector.vue';
+import { visitUrl } from '~/lib/utils/url_utility';
+import { generateRefDestinationPath } from './ref_switcher_utils';
+
+Vue.use(Translate);
+
+const REF_SWITCH_HEADER = s__('FindFile|Switch branch/tag');
+
+export default () => {
+ const el = document.getElementById('js-blob-ref-switcher');
+ if (!el) return false;
+
+ const { projectId, ref, namespace } = el.dataset;
+
+ return new Vue({
+ el,
+ render(createElement) {
+ return createElement(RefSelector, {
+ props: {
+ projectId,
+ value: ref,
+ translations: {
+ dropdownHeader: REF_SWITCH_HEADER,
+ searchPlaceholder: REF_SWITCH_HEADER,
+ },
+ },
+ on: {
+ input(selected) {
+ visitUrl(generateRefDestinationPath(selected, namespace));
+ },
+ },
+ });
+ },
+ });
+};
diff --git a/app/assets/javascripts/pages/projects/find_file/ref_switcher/ref_switcher_utils.js b/app/assets/javascripts/pages/projects/find_file/ref_switcher/ref_switcher_utils.js
new file mode 100644
index 00000000000..5fecd024f1a
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/find_file/ref_switcher/ref_switcher_utils.js
@@ -0,0 +1,28 @@
+import { joinPaths } from '~/lib/utils/url_utility';
+
+/**
+ * Generates a ref destination url based on the selected ref and current url.
+ * @param {string} selectedRef - The selected ref from the ref dropdown.
+ * @param {string} namespace - The destination namespace for the path.
+ */
+export function generateRefDestinationPath(selectedRef, namespace) {
+ if (!selectedRef || !namespace) {
+ return window.location.href;
+ }
+
+ const { pathname } = window.location;
+ const encodedHash = '%23';
+
+ const [projectRootPath] = pathname.split(namespace);
+
+ const destinationPath = joinPaths(
+ projectRootPath,
+ namespace,
+ encodeURI(selectedRef).replace(/#/g, encodedHash),
+ );
+
+ const newURL = new URL(window.location);
+ newURL.pathname = destinationPath;
+
+ return newURL.href;
+}
diff --git a/app/assets/javascripts/pages/projects/find_file/show/index.js b/app/assets/javascripts/pages/projects/find_file/show/index.js
index f47888f0cb8..e207df2434b 100644
--- a/app/assets/javascripts/pages/projects/find_file/show/index.js
+++ b/app/assets/javascripts/pages/projects/find_file/show/index.js
@@ -1,7 +1,9 @@
import $ from 'jquery';
import ShortcutsFindFile from '~/behaviors/shortcuts/shortcuts_find_file';
import ProjectFindFile from '~/projects/project_find_file';
+import InitBlobRefSwitcher from '../ref_switcher';
+InitBlobRefSwitcher();
const findElement = document.querySelector('.js-file-finder');
const projectFindFile = new ProjectFindFile($('.file-finder-holder'), {
url: findElement.dataset.fileFindUrl,
diff --git a/app/assets/javascripts/work_items/components/work_item_comment_form.vue b/app/assets/javascripts/work_items/components/work_item_comment_form.vue
index 9da4473cd46..a38fea01d1f 100644
--- a/app/assets/javascripts/work_items/components/work_item_comment_form.vue
+++ b/app/assets/javascripts/work_items/components/work_item_comment_form.vue
@@ -175,7 +175,7 @@ export default {
this.isSubmitting = true;
this.$emit('replying', this.commentText);
- const { queryVariables, fetchByIid, sortOrder } = this;
+ const { queryVariables, fetchByIid } = this;
try {
this.track('add_work_item_comment');
@@ -193,7 +193,7 @@ export default {
if (createNoteData.data?.createNote?.errors?.length) {
throw new Error(createNoteData.data?.createNote?.errors[0]);
}
- updateCommentState(store, createNoteData, fetchByIid, queryVariables, sortOrder);
+ updateCommentState(store, createNoteData, fetchByIid, queryVariables);
},
});
clearDraft(this.autosaveKey);
diff --git a/app/assets/javascripts/work_items/components/work_item_notes.vue b/app/assets/javascripts/work_items/components/work_item_notes.vue
index 474fea5cec6..457eabb5671 100644
--- a/app/assets/javascripts/work_items/components/work_item_notes.vue
+++ b/app/assets/javascripts/work_items/components/work_item_notes.vue
@@ -50,7 +50,6 @@ export default {
},
data() {
return {
- notesArray: [],
isLoadingMore: false,
perPage: DEFAULT_PAGE_SIZE_NOTES,
sortOrder: ASC,
@@ -61,12 +60,12 @@ export default {
initialLoading() {
return this.$apollo.queries.workItemNotes.loading && !this.isLoadingMore;
},
- pageInfo() {
- return this.workItemNotes?.pageInfo;
- },
avatarUrl() {
return window.gon.current_user_avatar_url;
},
+ pageInfo() {
+ return this.workItemNotes?.pageInfo;
+ },
hasNextPage() {
return this.pageInfo?.hasNextPage;
},
@@ -95,6 +94,14 @@ export default {
sortOrder: this.sortOrder,
};
},
+ notesArray() {
+ const notes = this.workItemNotes?.nodes || [];
+
+ if (this.sortOrder === DESC) {
+ return [...notes].reverse();
+ }
+ return notes;
+ },
},
apollo: {
workItemNotes: {
@@ -117,8 +124,6 @@ export default {
: data.workItem?.widgets;
const discussionNodes =
workItemWidgets.find((widget) => widget.type === 'NOTES')?.discussions || [];
- this.notesArray = discussionNodes?.nodes || [];
- this.updateSortingOrderIfApplicable();
return discussionNodes;
},
skip() {
@@ -128,6 +133,8 @@ export default {
this.$emit('error', i18n.fetchError);
},
result() {
+ this.updateSortingOrderIfApplicable();
+
if (this.hasNextPage) {
this.fetchMoreNotes();
}
@@ -163,7 +170,6 @@ export default {
},
changeNotesSortOrder(direction) {
this.sortOrder = direction;
- this.notesArray = [...this.notesArray].reverse();
this.changeNotesSortOrderAfterLoading = false;
},
async fetchMoreNotes() {
diff --git a/app/assets/javascripts/work_items/graphql/cache_utils.js b/app/assets/javascripts/work_items/graphql/cache_utils.js
index c1de9f673f2..16b892b3476 100644
--- a/app/assets/javascripts/work_items/graphql/cache_utils.js
+++ b/app/assets/javascripts/work_items/graphql/cache_utils.js
@@ -1,6 +1,5 @@
import { produce } from 'immer';
import { WIDGET_TYPE_NOTES } from '~/work_items/constants';
-import { ASC } from '~/notes/constants';
import { getWorkItemNotesQuery } from '~/work_items/utils';
/**
@@ -12,13 +11,7 @@ import { getWorkItemNotesQuery } from '~/work_items/utils';
* @param queryVariables
* @param sortOrder
*/
-export const updateCommentState = (
- store,
- { data: { createNote } },
- fetchByIid,
- queryVariables,
- sortOrder,
-) => {
+export const updateCommentState = (store, { data: { createNote } }, fetchByIid, queryVariables) => {
const notesQuery = getWorkItemNotesQuery(fetchByIid);
const variables = {
...queryVariables,
@@ -36,7 +29,10 @@ export const updateCommentState = (
)
: draftData.workItem.widgets.find((widget) => widget.type === WIDGET_TYPE_NOTES);
- const arrayPushMethod = sortOrder === ASC ? 'push' : 'unshift';
+ // as notes are currently sorted/reversed on the frontend rather than in the query
+ // we only ever push.
+ // const arrayPushMethod = sortOrder === ASC ? 'push' : 'unshift';
+ const arrayPushMethod = 'push';
// manual update of cache with a completely new discussion
if (createNote.note.discussion.notes.nodes.length === 1) {
diff --git a/app/graphql/types/ci/runner_type.rb b/app/graphql/types/ci/runner_type.rb
index 35339624e37..3cab2b7aba7 100644
--- a/app/graphql/types/ci/runner_type.rb
+++ b/app/graphql/types/ci/runner_type.rb
@@ -14,6 +14,9 @@ module Types
JOB_COUNT_LIMIT = 1000
+ # Only allow ephemeral_authentication_token to be visible for a short while
+ RUNNER_EPHEMERAL_TOKEN_AVAILABILITY_TIME = 3.hours
+
alias_method :runner, :object
field :access_level, ::Types::Ci::RunnerAccessLevelEnum, null: false,
@@ -35,6 +38,10 @@ module Types
description: 'Description of the runner.'
field :edit_admin_url, GraphQL::Types::String, null: true,
description: 'Admin form URL of the runner. Only available for administrators.'
+ field :ephemeral_authentication_token, GraphQL::Types::String, null: true,
+ description: 'Ephemeral authentication token used for runner machine registration.',
+ authorize: :read_ephemeral_token,
+ alpha: { milestone: '15.9' }
field :executor_name, GraphQL::Types::String, null: true,
description: 'Executor last advertised by the runner.',
method: :executor_name
@@ -134,6 +141,14 @@ module Types
Gitlab::Routing.url_helpers.edit_admin_runner_url(runner) if can_admin_runners?
end
+ def ephemeral_authentication_token
+ return unless runner.created_via_ui?
+ return unless runner.created_at > RUNNER_EPHEMERAL_TOKEN_AVAILABILITY_TIME.ago
+ return if runner.runner_machines.any?
+
+ runner.token
+ end
+
def project_count
BatchLoader::GraphQL.for(runner.id).batch(key: :runner_project_count) do |ids, loader, args|
counts = ::Ci::Runner.project_type
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 5c4512c5f72..19acd86a7f4 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -495,6 +495,10 @@ module Ci
"#{CREATED_RUNNER_TOKEN_PREFIX}#{token}"
end
+ def created_via_ui?
+ token.start_with?(CREATED_RUNNER_TOKEN_PREFIX)
+ end
+
private
scope :with_upgrade_status, ->(upgrade_status) do
diff --git a/app/policies/ci/runner_policy.rb b/app/policies/ci/runner_policy.rb
index 1c23b367489..7b01dccff87 100644
--- a/app/policies/ci/runner_policy.rb
+++ b/app/policies/ci/runner_policy.rb
@@ -9,6 +9,10 @@ module Ci
@user.owns_runner?(@subject)
end
+ condition(:creator) do
+ @user == @subject.creator
+ end
+
with_options scope: :subject, score: 0
condition(:is_instance_runner) do
@subject.instance_type?
@@ -72,6 +76,8 @@ module Ci
rule { ~admin & belongs_to_multiple_projects }.prevent :delete_runner
rule { ~admin & locked }.prevent :assign_runner
+
+ rule { creator }.enable :read_ephemeral_token
end
end
diff --git a/app/views/projects/find_file/show.html.haml b/app/views/projects/find_file/show.html.haml
index 7cd4ab08680..1d4e907dd61 100644
--- a/app/views/projects/find_file/show.html.haml
+++ b/app/views/projects/find_file/show.html.haml
@@ -4,7 +4,7 @@
.file-finder-holder.tree-holder.clearfix.js-file-finder{ 'data-file-find-url': "#{escape_javascript(project_files_path(@project, @ref, format: :json))}", 'data-find-tree-url': escape_javascript(project_tree_path(@project, @ref)), 'data-blob-url-template': escape_javascript(project_blob_path(@project, @ref)) }
.nav-block
.tree-ref-holder
- = render 'shared/ref_switcher', destination: 'find_file', path: @path
+ #js-blob-ref-switcher{ data: { project_id: @project.id, ref: @ref, namespace: "/-/find_file" } }
%ul.breadcrumb.repo-breadcrumb
%li.breadcrumb-item
= link_to project_tree_path(@project, @ref) do
diff --git a/config/feature_flags/development/gitlab_metrics_error_rate_sli.yml b/config/feature_flags/development/gitlab_metrics_error_rate_sli.yml
deleted file mode 100644
index 30b872343ce..00000000000
--- a/config/feature_flags/development/gitlab_metrics_error_rate_sli.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: gitlab_metrics_error_rate_sli
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/103976
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/383071
-milestone: '15.7'
-type: development
-group: group::scalability
-default_enabled: false
diff --git a/config/initializers/00_deprecations.rb b/config/initializers/00_deprecations.rb
index bfbd57c99fe..0d55510701d 100644
--- a/config/initializers/00_deprecations.rb
+++ b/config/initializers/00_deprecations.rb
@@ -20,8 +20,10 @@ end
#
# This way we prevent already fixed warnings from sneaking back into the codebase silently.
rails7_deprecation_warnings = [
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/366910
+ /no longer takes non-deterministic result/,
# https://gitlab.com/gitlab-org/gitlab/-/issues/339739
- /ActiveModel::Errors#keys is deprecated/,
+ /ActiveModel::Errors/,
# https://gitlab.com/gitlab-org/gitlab/-/issues/342492
/Rendering actions with '\.' in the name is deprecated/,
# https://gitlab.com/gitlab-org/gitlab/-/issues/333086
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index a000215f461..419175f6553 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -11415,6 +11415,7 @@ CI/CD variables for a project.
| <a id="cirunnercreatedat"></a>`createdAt` | [`Time`](#time) | Timestamp of creation of this runner. |
| <a id="cirunnerdescription"></a>`description` | [`String`](#string) | Description of the runner. |
| <a id="cirunnereditadminurl"></a>`editAdminUrl` | [`String`](#string) | Admin form URL of the runner. Only available for administrators. |
+| <a id="cirunnerephemeralauthenticationtoken"></a>`ephemeralAuthenticationToken` **{warning-solid}** | [`String`](#string) | **Introduced** in 15.9. This feature is in Alpha. It can be changed or removed at any time. Ephemeral authentication token used for runner machine registration. |
| <a id="cirunnerexecutorname"></a>`executorName` | [`String`](#string) | Executor last advertised by the runner. |
| <a id="cirunnergroups"></a>`groups` | [`GroupConnection`](#groupconnection) | Groups the runner is associated with. For group runners only. (see [Connections](#connections)) |
| <a id="cirunnerid"></a>`id` | [`CiRunnerID!`](#cirunnerid) | ID of the runner. |
diff --git a/doc/subscriptions/gitlab_dedicated/index.md b/doc/subscriptions/gitlab_dedicated/index.md
index b471d1d971f..2d9e3cb5ca8 100644
--- a/doc/subscriptions/gitlab_dedicated/index.md
+++ b/doc/subscriptions/gitlab_dedicated/index.md
@@ -47,7 +47,7 @@ The following GitLab application features are not available:
- FortiAuthenticator, or FortiToken 2FA
- Reply-by email
- Service Desk
-- GitLab-managed runners
+- GitLab-managed runners (hosted runners)
- Any feature [not listed above](#available-features) which must be configured outside of the GitLab user interface.
The following features will not be supported:
@@ -64,7 +64,9 @@ The following operational features are not available:
- Multiple Geo secondaries (Geo replicas) beyond the secondary site included by default
- Self-serve purchasing and configuration
- Multiple login providers
-- Non-AWS cloud providers, such as GCP or Azure
+- Support for deploying to non-AWS cloud providers, such as GCP or Azure
+- Observability Dashboard using Switchboard
+- Pre-Production Instance
### AWS regions not supported
diff --git a/doc/user/project/deploy_tokens/index.md b/doc/user/project/deploy_tokens/index.md
index 0b005256d58..cfb382d73e2 100644
--- a/doc/user/project/deploy_tokens/index.md
+++ b/doc/user/project/deploy_tokens/index.md
@@ -38,7 +38,9 @@ You can create deploy tokens at either the project or group level:
By default, a deploy token does not expire. You can optionally set an expiry date when you create
it. Expiry occurs at midnight UTC on that date.
-Deploy tokens can't be used for Git operations and Package Registry operations if [external authorization](../../admin_area/settings/external_authorization.md) is enabled.
+WARNING:
+You cannot use new or existing deploy tokens for Git operations and Package Registry operations if
+[external authorization](../../admin_area/settings/external_authorization.md) is enabled.
## Scope
diff --git a/lib/gitlab/metrics/rails_slis.rb b/lib/gitlab/metrics/rails_slis.rb
index 9fd4eec479e..e3cc7a40693 100644
--- a/lib/gitlab/metrics/rails_slis.rb
+++ b/lib/gitlab/metrics/rails_slis.rb
@@ -6,7 +6,7 @@ module Gitlab
class << self
def initialize_request_slis!
Gitlab::Metrics::Sli::Apdex.initialize_sli(:rails_request, possible_request_labels)
- initialize_rails_request_error_rate
+ Gitlab::Metrics::Sli::ErrorRate.initialize_sli(:rails_request, possible_request_labels)
Gitlab::Metrics::Sli::Apdex.initialize_sli(:graphql_query, possible_graphql_query_labels)
end
@@ -63,12 +63,6 @@ module Gitlab
}
end
end
-
- def initialize_rails_request_error_rate
- return unless Feature.enabled?(:gitlab_metrics_error_rate_sli, type: :development)
-
- Gitlab::Metrics::Sli::ErrorRate.initialize_sli(:rails_request, possible_request_labels)
- end
end
end
end
diff --git a/lib/gitlab/metrics/requests_rack_middleware.rb b/lib/gitlab/metrics/requests_rack_middleware.rb
index ca7b0f443c5..f635deabf76 100644
--- a/lib/gitlab/metrics/requests_rack_middleware.rb
+++ b/lib/gitlab/metrics/requests_rack_middleware.rb
@@ -127,8 +127,6 @@ module Gitlab
end
def record_error(urgency, status)
- return unless Feature.enabled?(:gitlab_metrics_error_rate_sli, type: :development)
-
Gitlab::Metrics::RailsSlis.request_error_rate.increment(
labels: labels_from_context.merge(request_urgency: urgency.name),
error: ::Gitlab::Metrics.server_error?(status)
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 74fca2846b5..fc5de91dfac 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -17557,6 +17557,9 @@ msgstr ""
msgid "Find file"
msgstr ""
+msgid "FindFile|Switch branch/tag"
+msgstr ""
+
msgid "Fingerprint (MD5)"
msgstr ""
@@ -34313,6 +34316,9 @@ msgstr ""
msgid "ProtectedEnvironments|Approval rules"
msgstr ""
+msgid "ProtectedEnvironments|List of protected environments (%{protectedEnvironmentsCount})"
+msgstr ""
+
msgid "ProtectedEnvironments|Number of approvals must be between 1 and 5"
msgstr ""
diff --git a/scripts/api/default_options.rb b/scripts/api/default_options.rb
index d10666e3a68..3085ef55085 100644
--- a/scripts/api/default_options.rb
+++ b/scripts/api/default_options.rb
@@ -13,6 +13,7 @@ end
module Host
DEFAULT_OPTIONS = {
instance_base_url: ENV['CI_SERVER_URL'],
- mr_id: ENV['CI_MERGE_REQUEST_ID']
+ target_project: ENV['CI_MERGE_REQUEST_PROJECT_ID'],
+ mr_iid: ENV['CI_MERGE_REQUEST_IID']
}.freeze
end
diff --git a/scripts/api/find_issues.rb b/scripts/api/find_issues.rb
new file mode 100644
index 00000000000..a1c37030319
--- /dev/null
+++ b/scripts/api/find_issues.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class FindIssues
+ def initialize(options)
+ @project = options.fetch(:project)
+
+ # Force the token to be a string so that if api_token is nil, it's set to '',
+ # allowing unauthenticated requests (for forks).
+ api_token = options.delete(:api_token).to_s
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
+ private_token: api_token
+ )
+ end
+
+ def execute(search_data)
+ client.issues(project, search_data)
+ end
+
+ private
+
+ attr_reader :project, :client
+end
diff --git a/scripts/failed_tests.rb b/scripts/failed_tests.rb
index 319961d277c..786d3c24c74 100755
--- a/scripts/failed_tests.rb
+++ b/scripts/failed_tests.rb
@@ -8,31 +8,47 @@ require 'json'
require 'set'
class FailedTests
+ DEFAULT_OPTIONS = {
+ previous_tests_report_path: 'test_results/previous/test_reports.json',
+ output_directory: 'tmp/previous_failed_tests/',
+ format: :oneline,
+ rspec_pg_regex: /rspec .+ pg12( .+)?/,
+ rspec_ee_pg_regex: /rspec-ee .+ pg12( .+)?/
+ }.freeze
+
def initialize(options)
@filename = options.delete(:previous_tests_report_path)
@output_directory = options.delete(:output_directory)
+ @format = options.delete(:format).to_sym
@rspec_pg_regex = options.delete(:rspec_pg_regex)
@rspec_ee_pg_regex = options.delete(:rspec_ee_pg_regex)
end
- def output_failed_test_files
+ def output_failed_tests
create_output_dir
- failed_files_for_suite_collection.each do |suite_collection_name, suite_collection_files|
- failed_test_files = suite_collection_files.map { |filepath| filepath.delete_prefix('./') }.join(' ')
+ failed_cases_for_suite_collection.each do |suite_name, suite_tests|
+ puts "[FailedTests] Detected #{suite_tests.size} failed tests in suite #{suite_name}..."
+ suite_tests =
+ case format
+ when :oneline
+ suite_tests.map { |test| test['file'] }.join(' ') # rubocop:disable Rails/Pluck
+ when :json
+ JSON.pretty_generate(suite_tests.to_a)
+ end
- output_file = File.join(output_directory, "#{suite_collection_name}_failed_files.txt")
+ output_file = File.join(output_directory, "#{suite_name}_failed_tests.#{output_file_format}")
File.open(output_file, 'w') do |file|
- file.write(failed_test_files)
+ file.write(suite_tests)
end
end
end
- def failed_files_for_suite_collection
- suite_map.each_with_object(Hash.new { |h, k| h[k] = Set.new }) do |(suite_collection_name, suite_collection_regex), hash|
+ def failed_cases_for_suite_collection
+ suite_map.each_with_object(Hash.new { |h, k| h[k] = Set.new }) do |(suite_name, suite_collection_regex), hash|
failed_suites.each do |suite|
- hash[suite_collection_name].merge(failed_files(suite)) if suite['name'] =~ suite_collection_regex
+ hash[suite_name].merge(failed_cases(suite)) if suite['name'] =~ suite_collection_regex
end
end
end
@@ -47,7 +63,7 @@ class FailedTests
private
- attr_reader :filename, :output_directory, :rspec_pg_regex, :rspec_ee_pg_regex
+ attr_reader :filename, :output_directory, :format, :rspec_pg_regex, :rspec_ee_pg_regex
def file_contents
@file_contents ||= begin
@@ -65,50 +81,75 @@ class FailedTests
end
end
+ def output_file_format
+ case format
+ when :oneline
+ 'txt'
+ when :json
+ 'json'
+ else
+ raise "[FailedTests] Unsupported format `#{format}` (allowed formats: `oneline` and `json`)!"
+ end
+ end
+
def failed_suites
return [] unless file_contents_as_json['suites']
file_contents_as_json['suites'].select { |suite| suite['failed_count'] > 0 }
end
- def failed_files(suite)
+ def failed_cases(suite)
return [] unless suite
- suite['test_cases'].each_with_object([]) do |failure_hash, failed_cases|
- failed_cases << failure_hash['file'] if failure_hash['status'] == 'failed'
+ suite['test_cases'].filter_map do |failure_hash|
+ next if failure_hash['status'] != 'failed'
+
+ failure_hash['job_url'] = suite['job_url']
+ failure_hash['file'] = failure_hash['file'].delete_prefix('./')
+
+ failure_hash
end
end
def create_output_dir
return if File.directory?(output_directory)
- puts 'Creating output directory...'
+ puts '[FailedTests] Creating output directory...'
FileUtils.mkdir_p(output_directory)
end
end
if $PROGRAM_NAME == __FILE__
- options = {
- previous_tests_report_path: 'test_results/previous/test_reports.json',
- output_directory: 'tmp/previous_failed_tests/',
- rspec_pg_regex: /rspec .+ pg12( .+)?/,
- rspec_ee_pg_regex: /rspec-ee .+ pg12( .+)?/
- }
+ options = FailedTests::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|
- opts.on("-p", "--previous-tests-report-path PREVIOUS_TESTS_REPORT_PATH", String, "Path of the file listing previous test failures") do |value|
+ opts.on("-p", "--previous-tests-report-path PREVIOUS_TESTS_REPORT_PATH", String,
+ "Path of the file listing previous test failures (defaults to " \
+ "`#{FailedTests::DEFAULT_OPTIONS[:previous_tests_report_path]}`)") do |value|
options[:previous_tests_report_path] = value
end
- opts.on("-o", "--output-directory OUTPUT_DIRECTORY", String, "Output directory for failed test files") do |value|
+ opts.on("-o", "--output-directory OUTPUT_DIRECTORY", String,
+ "Output directory for failed test files (defaults to " \
+ "`#{FailedTests::DEFAULT_OPTIONS[:output_directory]}`)") do |value|
options[:output_directory] = value
end
- opts.on("--rspec-pg-regex RSPEC_PG_REGEX", Regexp, "Regex to use when finding matching RSpec jobs") do |value|
+ opts.on("-f", "--format [oneline|json]", String,
+ "Format of the output files: oneline (with test filenames) or JSON (defaults to " \
+ "`#{FailedTests::DEFAULT_OPTIONS[:format]}`)") do |value|
+ options[:format] = value
+ end
+
+ opts.on("--rspec-pg-regex RSPEC_PG_REGEX", Regexp,
+ "Regex to use when finding matching RSpec jobs (defaults to " \
+ "`#{FailedTests::DEFAULT_OPTIONS[:rspec_pg_regex]}`)") do |value|
options[:rspec_pg_regex] = value
end
- opts.on("--rspec-ee-pg-regex RSPEC_EE_PG_REGEX", Regexp, "Regex to use when finding matching RSpec EE jobs") do |value|
+ opts.on("--rspec-ee-pg-regex RSPEC_EE_PG_REGEX", Regexp,
+ "Regex to use when finding matching RSpec EE jobs (defaults to " \
+ "`#{FailedTests::DEFAULT_OPTIONS[:rspec_ee_pg_regex]}`)") do |value|
options[:rspec_ee_pg_regex] = value
end
@@ -118,5 +159,5 @@ if $PROGRAM_NAME == __FILE__
end
end.parse!
- FailedTests.new(options).output_failed_test_files
+ FailedTests.new(options).output_failed_tests
end
diff --git a/scripts/pipeline_test_report_builder.rb b/scripts/pipeline_test_report_builder.rb
index 90af0451864..6f69a5c692f 100755
--- a/scripts/pipeline_test_report_builder.rb
+++ b/scripts/pipeline_test_report_builder.rb
@@ -5,7 +5,6 @@ require 'optparse'
require 'time'
require 'fileutils'
require 'uri'
-require 'cgi'
require 'net/http'
require 'json'
require_relative 'api/default_options'
@@ -19,50 +18,79 @@ require_relative 'api/default_options'
# https://gitlab.com/gitlab-org/gitlab/-/pipelines/363788864/tests/suite.json?build_ids[]=1555608749
# Push into expected format for failed tests
class PipelineTestReportBuilder
+ DEFAULT_OPTIONS = {
+ target_project: Host::DEFAULT_OPTIONS[:target_project],
+ mr_iid: Host::DEFAULT_OPTIONS[:mr_iid],
+ api_endpoint: API::DEFAULT_OPTIONS[:endpoint],
+ output_file_path: 'test_results/test_reports.json',
+ pipeline_index: :previous
+ }.freeze
+
def initialize(options)
@target_project = options.delete(:target_project)
- @mr_id = options.delete(:mr_id) || Host::DEFAULT_OPTIONS[:mr_id]
- @instance_base_url = options.delete(:instance_base_url) || Host::DEFAULT_OPTIONS[:instance_base_url]
- @output_file_path = options.delete(:output_file_path)
- end
-
- def test_report_for_latest_pipeline
- build_test_report_json_for_pipeline(previous_pipeline)
+ @mr_iid = options.delete(:mr_iid)
+ @api_endpoint = options.delete(:api_endpoint).to_s
+ @output_file_path = options.delete(:output_file_path).to_s
+ @pipeline_index = options.delete(:pipeline_index).to_sym
end
def execute
- if output_file_path
- FileUtils.mkdir_p(File.dirname(output_file_path))
- end
+ FileUtils.mkdir_p(File.dirname(output_file_path))
File.open(output_file_path, 'w') do |file|
- file.write(test_report_for_latest_pipeline)
+ file.write(test_report_for_pipeline)
end
end
+ def test_report_for_pipeline
+ build_test_report_json_for_pipeline
+ end
+
+ def latest_pipeline
+ pipelines_sorted_descending[0]
+ end
+
def previous_pipeline
- # Top of the list will always be the current pipeline
+ # Top of the list will always be the latest pipeline
# Second from top will be the previous pipeline
- pipelines_for_mr.sort_by { |a| -Time.parse(a['created_at']).to_i }[1]
+ pipelines_sorted_descending[1]
end
private
- attr_reader :target_project, :mr_id, :instance_base_url, :output_file_path
+ def pipeline
+ @pipeline ||=
+ case pipeline_index
+ when :latest
+ latest_pipeline
+ when :previous
+ previous_pipeline
+ else
+ raise "[PipelineTestReportBuilder] Unsupported pipeline_index `#{pipeline_index}` (allowed index: `latest` and `previous`!"
+ end
+ end
+
+ def pipelines_sorted_descending
+ # Sort pipelines by id in descending order: index 0 is the latest
+ # pipeline for the MR, index 1 is the previous one.
+ pipelines_for_mr.sort_by { |a| -a['id'] }
+ end
+
+ attr_reader :target_project, :mr_iid, :api_endpoint, :output_file_path, :pipeline_index
def pipeline_project_api_base_url(pipeline)
- "#{instance_base_url}/api/v4/projects/#{pipeline['project_id']}"
+ "#{api_endpoint}/projects/#{pipeline['project_id']}"
end
def target_project_api_base_url
- "#{instance_base_url}/api/v4/projects/#{CGI.escape(target_project)}"
+ "#{api_endpoint}/projects/#{target_project}"
end
def pipelines_for_mr
- fetch("#{target_project_api_base_url}/merge_requests/#{mr_id}/pipelines")
+ @pipelines_for_mr ||= fetch("#{target_project_api_base_url}/merge_requests/#{mr_iid}/pipelines")
end
- def failed_builds_for_pipeline(pipeline)
+ def failed_builds_for_pipeline
fetch("#{pipeline_project_api_base_url(pipeline)}/pipelines/#{pipeline['id']}/jobs?scope=failed&per_page=100")
end
@@ -70,44 +98,45 @@ class PipelineTestReportBuilder
# Here we request individual builds, even though it is possible to supply multiple build IDs.
# The reason for this; it is possible to lose the job context and name when requesting multiple builds.
# Please see for more info: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69053#note_709939709
- def test_report_for_build(pipeline, build_id)
- fetch("#{pipeline['web_url']}/tests/suite.json?build_ids[]=#{build_id}")
+ def test_report_for_build(pipeline_url, build_id)
+ fetch("#{pipeline_url}/tests/suite.json?build_ids[]=#{build_id}").tap do |suite|
+ suite['job_url'] = job_url(pipeline_url, build_id)
+ end
rescue Net::HTTPServerException => e
raise e unless e.response.code.to_i == 404
- puts "Artifacts not found. They may have expired. Skipping this build."
+ puts "[PipelineTestReportBuilder] Artifacts not found. They may have expired. Skipping this build."
end
- def build_test_report_json_for_pipeline(pipeline)
+ def build_test_report_json_for_pipeline
# empty file if no previous failed pipeline
- return {}.to_json if pipeline.nil? || pipeline['status'] != 'failed'
+ return {}.to_json if pipeline.nil?
- test_report = {}
+ test_report = { 'suites' => [] }
- puts "Discovered last failed pipeline (#{pipeline['id']}) for MR!#{mr_id}"
+ puts "[PipelineTestReportBuilder] Discovered #{pipeline_index} failed pipeline (##{pipeline['id']}) for MR!#{mr_iid}"
- failed_builds_for_test_stage = failed_builds_for_pipeline(pipeline).select do |failed_build|
- failed_build['stage'] == 'test'
- end
+ failed_builds_for_pipeline.each do |failed_build|
+ next if failed_build['stage'] != 'test'
- puts "#{failed_builds_for_test_stage.length} failed builds in test stage found..."
+ test_report['suites'] << test_report_for_build(pipeline['web_url'], failed_build['id'])
+ end
- if failed_builds_for_test_stage.any?
- test_report['suites'] ||= []
+ test_report['suites'].compact!
- failed_builds_for_test_stage.each do |failed_build|
- suite = test_report_for_build(pipeline, failed_build['id'])
- test_report['suites'] << suite if suite
- end
- end
+ puts "[PipelineTestReportBuilder] #{test_report['suites'].size} failed builds in test stage found..."
test_report.to_json
end
+ def job_url(pipeline_url, build_id)
+ pipeline_url.sub(%r{/pipelines/.+}, "/jobs/#{build_id}")
+ end
+
def fetch(uri_str)
uri = URI(uri_str)
- puts "URL: #{uri}"
+ puts "[PipelineTestReportBuilder] URL: #{uri}"
request = Net::HTTP::Get.new(uri)
@@ -119,7 +148,7 @@ class PipelineTestReportBuilder
when Net::HTTPSuccess
body = response.read_body
else
- raise "Unexpected response: #{response.value}"
+ raise "[PipelineTestReportBuilder] Unexpected response: #{response.value}"
end
end
end
@@ -129,25 +158,17 @@ class PipelineTestReportBuilder
end
if $PROGRAM_NAME == __FILE__
- options = Host::DEFAULT_OPTIONS.dup
+ options = PipelineTestReportBuilder::DEFAULT_OPTIONS.dup
OptionParser.new do |opts|
- opts.on("-t", "--target-project TARGET_PROJECT", String, "Project where to find the merge request") do |value|
- options[:target_project] = value
- end
-
- opts.on("-m", "--mr-id MR_ID", String, "A merge request ID") do |value|
- options[:mr_id] = value
- end
-
- opts.on("-i", "--instance-base-url INSTANCE_BASE_URL", String, "URL of the instance where project and merge request resides") do |value|
- options[:instance_base_url] = value
- end
-
opts.on("-o", "--output-file-path OUTPUT_PATH", String, "A path for output file") do |value|
options[:output_file_path] = value
end
+ opts.on("-p", "--pipeline-index [latest|previous]", String, "What pipeline to retrieve (defaults to `#{PipelineTestReportBuilder::DEFAULT_OPTIONS[:pipeline_index]}`)") do |value|
+ options[:pipeline_index] = value
+ end
+
opts.on("-h", "--help", "Prints this help") do
puts opts
exit
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index 5b2c84059ee..de735e03db0 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -75,23 +75,19 @@ function crystalball_rspec_data_exists() {
compgen -G "crystalball/rspec*.yml" >/dev/null
}
-function retrieve_previous_failed_tests() {
+function retrieve_failed_tests() {
local directory_for_output_reports="${1}"
- local rspec_pg_regex="${2}"
- local rspec_ee_pg_regex="${3}"
- local pipeline_report_path="test_results/previous/test_reports.json"
-
- # Used to query merge requests. This variable reflects where the merge request has been created
- local target_project_path="${CI_MERGE_REQUEST_PROJECT_PATH}"
- local instance_url="${CI_SERVER_URL}"
+ local failed_tests_format="${2}"
+ local pipeline_index="${3}"
+ local pipeline_report_path="tmp/test_results/${pipeline_index}/test_reports.json"
echo 'Attempting to build pipeline test report...'
- scripts/pipeline_test_report_builder.rb --instance-base-url "${instance_url}" --target-project "${target_project_path}" --mr-id "${CI_MERGE_REQUEST_IID}" --output-file-path "${pipeline_report_path}"
+ scripts/pipeline_test_report_builder.rb --output-file-path "${pipeline_report_path}" --pipeline-index "${pipeline_index}"
echo 'Generating failed tests lists...'
- scripts/failed_tests.rb --previous-tests-report-path "${pipeline_report_path}" --output-directory "${directory_for_output_reports}" --rspec-pg-regex "${rspec_pg_regex}" --rspec-ee-pg-regex "${rspec_ee_pg_regex}"
+ scripts/failed_tests.rb --previous-tests-report-path "${pipeline_report_path}" --format "${failed_tests_format}" --output-directory "${directory_for_output_reports}"
}
function rspec_args() {
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 44bbabb4c99..6fee1909bb7 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -125,6 +125,10 @@ function install_tff_gem() {
run_timed_command "gem install test_file_finder --no-document --version 0.1.4"
}
+function install_activesupport_gem() {
+ run_timed_command "gem install activesupport --no-document --version 6.1.7.1"
+}
+
function install_junit_merge_gem() {
run_timed_command "gem install junit_merge --no-document --version 0.1.2"
}
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 67186282c95..f113ca2425f 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -466,7 +466,9 @@ FactoryBot.define do
trait :with_jira_integration do
has_external_issue_tracker { true }
- jira_integration
+ after :create do |project|
+ create(:jira_integration, project: project)
+ end
end
trait :with_prometheus_integration do
diff --git a/spec/features/projects/files/user_find_file_spec.rb b/spec/features/projects/files/user_find_file_spec.rb
index 1b53189da83..0a3b8c19cc4 100644
--- a/spec/features/projects/files/user_find_file_spec.rb
+++ b/spec/features/projects/files/user_find_file_spec.rb
@@ -21,6 +21,10 @@ RSpec.describe 'User find project file', feature_category: :projects do
fill_in 'file_find', with: text
end
+ def ref_selector_dropdown
+ find('.gl-dropdown-toggle > .gl-dropdown-button-text')
+ end
+
it 'navigates to find file by shortcut', :js do
find('body').native.send_key('t')
@@ -65,4 +69,39 @@ RSpec.describe 'User find project file', feature_category: :projects do
expect(page).not_to have_content('CHANGELOG')
expect(page).not_to have_content('VERSION')
end
+
+ context 'when refs are switched', :js do
+ before do
+ click_link 'Find file'
+ end
+
+ specify 'the ref switcher lists all the branches and tags' do
+ ref = 'add-ipython-files'
+ expect(ref_selector_dropdown).not_to have_text(ref)
+
+ find('.ref-selector').click
+ wait_for_requests
+
+ page.within('.ref-selector') do
+ expect(page).to have_selector('li', text: ref)
+ expect(page).to have_selector('li', text: 'v1.0.0')
+ end
+ end
+
+ specify 'the search result changes when refs switched' do
+ ref = 'add-ipython-files'
+ expect(ref_selector_dropdown).not_to have_text(ref)
+
+ find('.ref-selector button').click
+ wait_for_requests
+
+ page.within('.ref-selector') do
+ fill_in _('Switch branch/tag'), with: ref
+ wait_for_requests
+
+ find('.gl-dropdown-item', text: ref).click
+ end
+ expect(ref_selector_dropdown).to have_text(ref)
+ end
+ end
end
diff --git a/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js b/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js
new file mode 100644
index 00000000000..ef2e5d779d8
--- /dev/null
+++ b/spec/frontend/pages/projects/find_file/ref_switcher/ref_switcher_utils_spec.js
@@ -0,0 +1,39 @@
+import { generateRefDestinationPath } from '~/pages/projects/find_file/ref_switcher/ref_switcher_utils';
+import setWindowLocation from 'helpers/set_window_location_helper';
+
+const projectRootPath = 'root/Project1';
+const selectedRef = 'feature/test';
+
+describe('generateRefDestinationPath', () => {
+ it.each`
+ currentPath | result
+ ${`${projectRootPath}/-/find_file/flightjs/Flight`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}`}
+ ${`${projectRootPath}/-/find_file/test/test1?test=something`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something`}
+ ${`${projectRootPath}/-/find_file/simpletest?test=something&test=it`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something&test=it`}
+ ${`${projectRootPath}/-/find_file/some_random_char?test=something&test[]=it&test[]=is`} | ${`http://test.host/${projectRootPath}/-/find_file/${selectedRef}?test=something&test[]=it&test[]=is`}
+ `('generates the correct destination path for $currentPath', ({ currentPath, result }) => {
+ setWindowLocation(currentPath);
+ expect(generateRefDestinationPath(selectedRef, '/-/find_file')).toBe(result);
+ });
+
+ it("returns original url if it's missing selectedRef param", () => {
+ setWindowLocation(`${projectRootPath}/-/find_file/flightjs/Flight`);
+ expect(generateRefDestinationPath(undefined, '/-/find_file')).toBe(
+ `http://test.host/${projectRootPath}/-/find_file/flightjs/Flight`,
+ );
+ });
+
+ it("returns original url if it's missing namespace param", () => {
+ setWindowLocation(`${projectRootPath}/-/find_file/flightjs/Flight`);
+ expect(generateRefDestinationPath(selectedRef, undefined)).toBe(
+ `http://test.host/${projectRootPath}/-/find_file/flightjs/Flight`,
+ );
+ });
+
+ it("returns original url if it's missing namespace and selectedRef param", () => {
+ setWindowLocation(`${projectRootPath}/-/find_file/flightjs/Flight`);
+ expect(generateRefDestinationPath(undefined, undefined)).toBe(
+ `http://test.host/${projectRootPath}/-/find_file/flightjs/Flight`,
+ );
+ });
+});
diff --git a/spec/graphql/types/ci/runner_type_spec.rb b/spec/graphql/types/ci/runner_type_spec.rb
index 2296d1bea30..a2d107ae295 100644
--- a/spec/graphql/types/ci/runner_type_spec.rb
+++ b/spec/graphql/types/ci/runner_type_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe GitlabSchema.types['CiRunner'], feature_category: :runner do
version short_sha revision locked run_untagged ip_address runner_type tag_list
project_count job_count admin_url edit_admin_url user_permissions executor_name architecture_name platform_name
maintenance_note maintenance_note_html groups projects jobs token_expires_at owner_project job_execution_status
+ ephemeral_authentication_token
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb
index 9da102fb8b8..af4df00b7da 100644
--- a/spec/lib/gitlab/metrics/rails_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/rails_slis_spec.rb
@@ -46,16 +46,6 @@ RSpec.describe Gitlab::Metrics::RailsSlis do
described_class.initialize_request_slis!
end
-
- it "initializes the SLI for all possible endpoints if they weren't given error rate feature flag is disabled", :aggregate_failures do
- stub_feature_flags(gitlab_metrics_error_rate_sli: false)
-
- expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:rails_request, array_including(*possible_labels)).and_call_original
- expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:graphql_query, array_including(*possible_graphql_labels)).and_call_original
- expect(Gitlab::Metrics::Sli::ErrorRate).not_to receive(:initialize_sli)
-
- described_class.initialize_request_slis!
- end
end
describe '.request_apdex' do
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index d6644bc12d2..97b70b5a7fc 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -53,18 +53,6 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures, fea
subject.call(env)
end
- it 'does not track error rate when feature flag is disabled' do
- stub_feature_flags(gitlab_metrics_error_rate_sli: false)
-
- expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown')
- expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ method: 'get' }, a_positive_execution_time)
- expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment)
- .with(labels: { feature_category: 'unknown', endpoint_id: 'unknown', request_urgency: :default }, success: true)
- expect(Gitlab::Metrics::RailsSlis.request_error_rate).not_to receive(:increment)
-
- subject.call(env)
- end
-
context 'request is a health check endpoint' do
['/-/liveness', '/-/liveness/', '/-/%6D%65%74%72%69%63%73'].each do |path|
context "when path is #{path}" do
@@ -116,17 +104,6 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures, fea
subject.call(env)
end
-
- it 'does not track error rate when feature flag is disabled' do
- stub_feature_flags(gitlab_metrics_error_rate_sli: false)
-
- expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '500', feature_category: 'unknown')
- expect(described_class).not_to receive(:http_request_duration_seconds)
- expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex)
- expect(Gitlab::Metrics::RailsSlis.request_error_rate).not_to receive(:increment)
-
- subject.call(env)
- end
end
context '@app.call throws exception' do
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index e941fccada1..a3982263de8 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
it 'executes hooks' do
expect_next(described_class).to receive(:execute_hooks)
- create(:ci_build)
+ create(:ci_build, pipeline: pipeline)
end
end
end
@@ -108,19 +108,19 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { described_class.ref_protected }
context 'when protected is true' do
- let!(:job) { create(:ci_build, :protected) }
+ let!(:job) { create(:ci_build, :protected, pipeline: pipeline) }
it { is_expected.to include(job) }
end
context 'when protected is false' do
- let!(:job) { create(:ci_build) }
+ let!(:job) { create(:ci_build, pipeline: pipeline) }
it { is_expected.not_to include(job) }
end
context 'when protected is nil' do
- let!(:job) { create(:ci_build) }
+ let!(:job) { create(:ci_build, pipeline: pipeline) }
before do
job.update_attribute(:protected, nil)
@@ -134,7 +134,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { described_class.with_downloadable_artifacts }
context 'when job does not have a downloadable artifact' do
- let!(:job) { create(:ci_build) }
+ let!(:job) { create(:ci_build, pipeline: pipeline) }
it 'does not return the job' do
is_expected.not_to include(job)
@@ -144,7 +144,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
::Ci::JobArtifact::DOWNLOADABLE_TYPES.each do |type|
context "when job has a #{type} artifact" do
it 'returns the job' do
- job = create(:ci_build)
+ job = create(:ci_build, pipeline: pipeline)
create(
:ci_job_artifact,
file_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[type.to_sym],
@@ -158,7 +158,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when job has a non-downloadable artifact' do
- let!(:job) { create(:ci_build, :trace_artifact) }
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'does not return the job' do
is_expected.not_to include(job)
@@ -170,7 +170,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { described_class.with_erasable_artifacts }
context 'when job does not have any artifacts' do
- let!(:job) { create(:ci_build) }
+ let!(:job) { create(:ci_build, pipeline: pipeline) }
it 'does not return the job' do
is_expected.not_to include(job)
@@ -180,7 +180,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
::Ci::JobArtifact.erasable_file_types.each do |type|
context "when job has a #{type} artifact" do
it 'returns the job' do
- job = create(:ci_build)
+ job = create(:ci_build, pipeline: pipeline)
create(
:ci_job_artifact,
file_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[type.to_sym],
@@ -194,7 +194,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when job has a non-erasable artifact' do
- let!(:job) { create(:ci_build, :trace_artifact) }
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'does not return the job' do
is_expected.not_to include(job)
@@ -206,7 +206,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { described_class.with_live_trace }
context 'when build has live trace' do
- let!(:build) { create(:ci_build, :success, :trace_live) }
+ let!(:build) { create(:ci_build, :success, :trace_live, pipeline: pipeline) }
it 'selects the build' do
is_expected.to eq([build])
@@ -214,7 +214,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build does not have live trace' do
- let!(:build) { create(:ci_build, :success, :trace_artifact) }
+ let!(:build) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
it 'does not select the build' do
is_expected.to be_empty
@@ -226,7 +226,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { described_class.with_stale_live_trace }
context 'when build has a stale live trace' do
- let!(:build) { create(:ci_build, :success, :trace_live, finished_at: 1.day.ago) }
+ let!(:build) { create(:ci_build, :success, :trace_live, finished_at: 1.day.ago, pipeline: pipeline) }
it 'selects the build' do
is_expected.to eq([build])
@@ -234,7 +234,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build does not have a stale live trace' do
- let!(:build) { create(:ci_build, :success, :trace_live, finished_at: 1.hour.ago) }
+ let!(:build) { create(:ci_build, :success, :trace_live, finished_at: 1.hour.ago, pipeline: pipeline) }
it 'does not select the build' do
is_expected.to be_empty
@@ -245,9 +245,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '.license_management_jobs' do
subject { described_class.license_management_jobs }
- let!(:management_build) { create(:ci_build, :success, name: :license_management) }
- let!(:scanning_build) { create(:ci_build, :success, name: :license_scanning) }
- let!(:another_build) { create(:ci_build, :success, name: :another_type) }
+ let!(:management_build) { create(:ci_build, :success, name: :license_management, pipeline: pipeline) }
+ let!(:scanning_build) { create(:ci_build, :success, name: :license_scanning, pipeline: pipeline) }
+ let!(:another_build) { create(:ci_build, :success, name: :another_type, pipeline: pipeline) }
it 'returns license_scanning jobs' do
is_expected.to include(scanning_build)
@@ -268,7 +268,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
let(:date) { 1.hour.ago }
context 'when build has finished one day ago' do
- let!(:build) { create(:ci_build, :success, finished_at: 1.day.ago) }
+ let!(:build) { create(:ci_build, :success, finished_at: 1.day.ago, pipeline: pipeline) }
it 'selects the build' do
is_expected.to eq([build])
@@ -276,7 +276,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build has finished 30 minutes ago' do
- let!(:build) { create(:ci_build, :success, finished_at: 30.minutes.ago) }
+ let!(:build) { create(:ci_build, :success, finished_at: 30.minutes.ago, pipeline: pipeline) }
it 'returns an empty array' do
is_expected.to be_empty
@@ -284,7 +284,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is still running' do
- let!(:build) { create(:ci_build, :running) }
+ let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
it 'returns an empty array' do
is_expected.to be_empty
@@ -295,9 +295,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '.with_exposed_artifacts' do
subject { described_class.with_exposed_artifacts }
- let!(:job1) { create(:ci_build) }
- let!(:job2) { create(:ci_build, options: options) }
- let!(:job3) { create(:ci_build) }
+ let!(:job1) { create(:ci_build, pipeline: pipeline) }
+ let!(:job2) { create(:ci_build, options: options, pipeline: pipeline) }
+ let!(:job3) { create(:ci_build, pipeline: pipeline) }
context 'when some jobs have exposed artifacs and some not' do
let(:options) { { artifacts: { expose_as: 'test', paths: ['test'] } } }
@@ -337,7 +337,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'when there are multiple builds containing artifacts' do
before do
- create_list(:ci_build, 5, :success, :test_reports)
+ create_list(:ci_build, 5, :success, :test_reports, pipeline: pipeline)
end
it 'does not execute a query for selecting job artifact one by one' do
@@ -353,8 +353,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '.with_needs' do
- let!(:build) { create(:ci_build) }
- let!(:build_b) { create(:ci_build) }
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:build_b) { create(:ci_build, pipeline: pipeline) }
let!(:build_need_a) { create(:ci_build_need, build: build) }
let!(:build_need_b) { create(:ci_build_need, build: build_b) }
@@ -393,7 +393,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#stick_build_if_status_changed' do
it 'sticks the build if the status changed' do
- job = create(:ci_build, :pending)
+ job = create(:ci_build, :pending, pipeline: pipeline)
expect(described_class.sticking).to receive(:stick)
.with(:build, job.id)
@@ -403,7 +403,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#enqueue' do
- let(:build) { create(:ci_build, :created) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
before do
allow(build).to receive(:any_unmet_prerequisites?).and_return(has_prerequisites)
@@ -480,7 +480,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#enqueue_preparing' do
- let(:build) { create(:ci_build, :preparing) }
+ let(:build) { create(:ci_build, :preparing, pipeline: pipeline) }
before do
allow(build).to receive(:any_unmet_prerequisites?).and_return(has_unmet_prerequisites)
@@ -535,7 +535,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#run' do
context 'when build has been just created' do
- let(:build) { create(:ci_build, :created) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
it 'creates queuing entry and then removes it' do
build.enqueue!
@@ -547,7 +547,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build status transition fails' do
- let(:build) { create(:ci_build, :pending) }
+ let(:build) { create(:ci_build, :pending, pipeline: pipeline) }
before do
create(:ci_pending_build, build: build, project: build.project)
@@ -563,7 +563,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build has been picked by a shared runner' do
- let(:build) { create(:ci_build, :pending) }
+ let(:build) { create(:ci_build, :pending, pipeline: pipeline) }
it 'creates runtime metadata entry' do
build.runner = create(:ci_runner, :instance_type)
@@ -577,7 +577,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#drop' do
context 'when has a runtime tracking entry' do
- let(:build) { create(:ci_build, :pending) }
+ let(:build) { create(:ci_build, :pending, pipeline: pipeline) }
it 'removes runtime tracking entry' do
build.runner = create(:ci_runner, :instance_type)
@@ -614,10 +614,10 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#outdated_deployment?' do
subject { build.outdated_deployment? }
- let(:build) { create(:ci_build, :created, :with_deployment, project: project, environment: 'production') }
+ let(:build) { create(:ci_build, :created, :with_deployment, pipeline: pipeline, environment: 'production') }
context 'when build has no environment' do
- let(:build) { create(:ci_build, :created, project: project, environment: nil) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline, environment: nil) }
it { expect(subject).to be_falsey }
end
@@ -647,7 +647,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is older than the latest deployment but succeeded once' do
- let(:build) { create(:ci_build, :success, :with_deployment, project: project, environment: 'production') }
+ let(:build) { create(:ci_build, :success, :with_deployment, pipeline: pipeline, environment: 'production') }
before do
allow(build.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
@@ -663,13 +663,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.schedulable? }
context 'when build is schedulable' do
- let(:build) { create(:ci_build, :created, :schedulable, project: project) }
+ let(:build) { create(:ci_build, :created, :schedulable, pipeline: pipeline) }
it { expect(subject).to be_truthy }
end
context 'when build is not schedulable' do
- let(:build) { create(:ci_build, :created, project: project) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
it { expect(subject).to be_falsy }
end
@@ -682,7 +682,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
project.add_developer(user)
end
- let(:build) { create(:ci_build, :created, :schedulable, user: user, project: project) }
+ let(:build) { create(:ci_build, :created, :schedulable, user: user, pipeline: pipeline) }
it 'transits to scheduled' do
allow(Ci::BuildScheduleWorker).to receive(:perform_at)
@@ -743,7 +743,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#options_scheduled_at' do
subject { build.options_scheduled_at }
- let(:build) { build_stubbed(:ci_build, options: option) }
+ let(:build) { build_stubbed(:ci_build, options: option, pipeline: pipeline) }
context 'when start_in is 1 day' do
let(:option) { { start_in: '1 day' } }
@@ -881,18 +881,18 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'when new artifacts are used' do
context 'artifacts archive does not exist' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
context 'is expired' do
- let(:build) { create(:ci_build, :artifacts, :expired) }
+ let(:build) { create(:ci_build, :artifacts, :expired, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
@@ -909,13 +909,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'artifacts archive does not exist' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
@@ -927,13 +927,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'artifacts archive does not exist' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
context 'artifacts archive exists' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
@@ -941,7 +941,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#available_artifacts?' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
subject { build.available_artifacts? }
@@ -1000,7 +1000,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.browsable_artifacts? }
context 'artifacts metadata does exists' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
@@ -1010,13 +1010,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.artifacts_public? }
context 'artifacts with defaults' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
context 'non public artifacts' do
- let(:build) { create(:ci_build, :artifacts, :non_public_artifacts) }
+ let(:build) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
it { is_expected.to be_falsey }
end
@@ -1050,7 +1050,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'artifacts archive is a zip file and metadata exists' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
@@ -1277,12 +1277,12 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#has_live_trace?' do
subject { build.has_live_trace? }
- let(:build) { create(:ci_build, :trace_live) }
+ let(:build) { create(:ci_build, :trace_live, pipeline: pipeline) }
it { is_expected.to be_truthy }
context 'when build does not have live trace' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
@@ -1291,12 +1291,12 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#has_archived_trace?' do
subject { build.has_archived_trace? }
- let(:build) { create(:ci_build, :trace_artifact) }
+ let(:build) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it { is_expected.to be_truthy }
context 'when build does not have archived trace' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_falsy }
end
@@ -1306,7 +1306,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.has_job_artifacts? }
context 'when build has a job artifact' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
@@ -1316,13 +1316,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.has_test_reports? }
context 'when build has a test report' do
- let(:build) { create(:ci_build, :test_reports) }
+ let(:build) { create(:ci_build, :test_reports, pipeline: pipeline) }
it { is_expected.to be_truthy }
end
context 'when build does not have a test report' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_falsey }
end
@@ -1395,7 +1395,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
with_them do
- let(:build) { create(:ci_build, trait, project: project, pipeline: pipeline) }
+ let(:build) { create(:ci_build, trait, pipeline: pipeline) }
let(:event) { state }
context "when transitioning to #{params[:state]}" do
@@ -1419,7 +1419,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe 'state transition as a deployable' do
subject { build.send(event) }
- let!(:build) { create(:ci_build, :with_deployment, :start_review_app, project: project, pipeline: pipeline) }
+ let!(:build) { create(:ci_build, :with_deployment, :start_review_app, pipeline: pipeline) }
let(:deployment) { build.deployment }
let(:environment) { deployment.environment }
@@ -1583,7 +1583,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.on_stop }
context 'when a job has a specification that it can be stopped from the other job' do
- let(:build) { create(:ci_build, :start_review_app) }
+ let(:build) { create(:ci_build, :start_review_app, pipeline: pipeline) }
it 'returns the other job name' do
is_expected.to eq('stop_review_app')
@@ -1591,7 +1591,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when a job does not have environment information' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it 'returns nil' do
is_expected.to be_nil
@@ -1666,7 +1666,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
let(:build) do
create(:ci_build,
ref: 'master',
- environment: 'review/$CI_COMMIT_REF_NAME')
+ environment: 'review/$CI_COMMIT_REF_NAME',
+ pipeline: pipeline)
end
it { is_expected.to eq('review/master') }
@@ -1676,7 +1677,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
let(:build) do
create(:ci_build,
yaml_variables: [{ key: :APP_HOST, value: 'host' }],
- environment: 'review/$APP_HOST')
+ environment: 'review/$APP_HOST',
+ pipeline: pipeline)
end
it 'returns an expanded environment name with a list of variables' do
@@ -1698,7 +1700,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'when using persisted variables' do
let(:build) do
- create(:ci_build, environment: 'review/x$CI_BUILD_ID')
+ create(:ci_build, environment: 'review/x$CI_BUILD_ID', pipeline: pipeline)
end
it { is_expected.to eq('review/x') }
@@ -1715,7 +1717,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
create(:ci_build,
ref: 'master',
yaml_variables: yaml_variables,
- environment: 'review/$ENVIRONMENT_NAME')
+ environment: 'review/$ENVIRONMENT_NAME',
+ pipeline: pipeline)
end
it { is_expected.to eq('review/master') }
@@ -1723,7 +1726,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#expanded_kubernetes_namespace' do
- let(:build) { create(:ci_build, environment: environment, options: options) }
+ let(:build) { create(:ci_build, environment: environment, options: options, pipeline: pipeline) }
subject { build.expanded_kubernetes_namespace }
@@ -1859,7 +1862,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'build is not erasable' do
- let!(:build) { create(:ci_build) }
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
describe '#erasable?' do
subject { build.erasable? }
@@ -1870,7 +1873,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'build is erasable' do
context 'new artifacts' do
- let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
+ let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts, pipeline: pipeline) }
describe '#erasable?' do
subject { build.erasable? }
@@ -1879,7 +1882,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#erased?' do
- let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) }
+ let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts, pipeline: pipeline) }
subject { build.erased? }
@@ -1973,13 +1976,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is created' do
- let(:build) { create(:ci_build, :created) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
it { is_expected.to be_cancelable }
end
context 'when build is waiting for resource' do
- let(:build) { create(:ci_build, :waiting_for_resource) }
+ let(:build) { create(:ci_build, :waiting_for_resource, pipeline: pipeline) }
it { is_expected.to be_cancelable }
end
@@ -2042,7 +2045,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#tag_list' do
- let_it_be(:build) { create(:ci_build, tag_list: ['tag']) }
+ let_it_be(:build) { create(:ci_build, tag_list: ['tag'], pipeline: pipeline) }
context 'when tags are preloaded' do
it 'does not trigger queries' do
@@ -2059,7 +2062,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#save_tags' do
- let(:build) { create(:ci_build, tag_list: ['tag']) }
+ let(:build) { create(:ci_build, tag_list: ['tag'], pipeline: pipeline) }
it 'saves tags' do
build.save!
@@ -2088,13 +2091,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#has_tags?' do
context 'when build has tags' do
- subject { create(:ci_build, tag_list: ['tag']) }
+ subject { create(:ci_build, tag_list: ['tag'], pipeline: pipeline) }
it { is_expected.to have_tags }
end
context 'when build does not have tags' do
- subject { create(:ci_build, tag_list: []) }
+ subject { create(:ci_build, tag_list: [], pipeline: pipeline) }
it { is_expected.not_to have_tags }
end
@@ -2149,9 +2152,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '.keep_artifacts!' do
- let!(:build) { create(:ci_build, artifacts_expire_at: Time.current + 7.days) }
+ let!(:build) { create(:ci_build, artifacts_expire_at: Time.current + 7.days, pipeline: pipeline) }
let!(:builds_for_update) do
- Ci::Build.where(id: create_list(:ci_build, 3, artifacts_expire_at: Time.current + 7.days).map(&:id))
+ Ci::Build.where(id: create_list(:ci_build, 3, artifacts_expire_at: Time.current + 7.days, pipeline: pipeline).map(&:id))
end
it 'resets expire_at' do
@@ -2193,7 +2196,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#keep_artifacts!' do
- let(:build) { create(:ci_build, artifacts_expire_at: Time.current + 7.days) }
+ let(:build) { create(:ci_build, artifacts_expire_at: Time.current + 7.days, pipeline: pipeline) }
subject { build.keep_artifacts! }
@@ -2215,7 +2218,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#auto_retry_expected?' do
- subject { create(:ci_build, :failed) }
+ subject { create(:ci_build, :failed, pipeline: pipeline) }
context 'when build is failed and auto retry is configured' do
before do
@@ -2237,7 +2240,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#artifacts_file_for_type' do
- let(:build) { create(:ci_build, :artifacts) }
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
let(:file_type) { :archive }
subject { build.artifacts_file_for_type(file_type) }
@@ -2250,6 +2253,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#merge_request' do
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
subject { pipeline.builds.take.merge_request }
context 'on a branch pipeline' do
@@ -2294,19 +2299,23 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'on a detached merged request pipeline' do
- let(:pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline, :with_job) }
+ let(:pipeline) do
+ create(:ci_pipeline, :detached_merge_request_pipeline, :with_job, merge_request: merge_request)
+ end
it { is_expected.to eq(pipeline.merge_request) }
end
context 'on a legacy detached merged request pipeline' do
- let(:pipeline) { create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job) }
+ let(:pipeline) do
+ create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job, merge_request: merge_request)
+ end
it { is_expected.to eq(pipeline.merge_request) }
end
context 'on a pipeline for merged results' do
- let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_job) }
+ let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_job, merge_request: merge_request) }
it { is_expected.to eq(pipeline.merge_request) }
end
@@ -2342,7 +2351,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when options include artifacts:expose_as' do
- let(:build) { create(:ci_build, options: { artifacts: { expose_as: 'test' } }) }
+ let(:build) { create(:ci_build, options: { artifacts: { expose_as: 'test' } }, pipeline: pipeline) }
it 'saves the presence of expose_as into build metadata' do
expect(build.metadata).to have_exposed_artifacts
@@ -2468,56 +2477,56 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#playable?' do
context 'when build is a manual action' do
context 'when build has been skipped' do
- subject { build_stubbed(:ci_build, :manual, status: :skipped) }
+ subject { build_stubbed(:ci_build, :manual, status: :skipped, pipeline: pipeline) }
it { is_expected.not_to be_playable }
end
context 'when build has been canceled' do
- subject { build_stubbed(:ci_build, :manual, status: :canceled) }
+ subject { build_stubbed(:ci_build, :manual, status: :canceled, pipeline: pipeline) }
it { is_expected.to be_playable }
end
context 'when build is successful' do
- subject { build_stubbed(:ci_build, :manual, status: :success) }
+ subject { build_stubbed(:ci_build, :manual, status: :success, pipeline: pipeline) }
it { is_expected.to be_playable }
end
context 'when build has failed' do
- subject { build_stubbed(:ci_build, :manual, status: :failed) }
+ subject { build_stubbed(:ci_build, :manual, status: :failed, pipeline: pipeline) }
it { is_expected.to be_playable }
end
context 'when build is a manual untriggered action' do
- subject { build_stubbed(:ci_build, :manual, status: :manual) }
+ subject { build_stubbed(:ci_build, :manual, status: :manual, pipeline: pipeline) }
it { is_expected.to be_playable }
end
context 'when build is a manual and degenerated' do
- subject { build_stubbed(:ci_build, :manual, :degenerated, status: :manual) }
+ subject { build_stubbed(:ci_build, :manual, :degenerated, status: :manual, pipeline: pipeline) }
it { is_expected.not_to be_playable }
end
end
context 'when build is scheduled' do
- subject { build_stubbed(:ci_build, :scheduled) }
+ subject { build_stubbed(:ci_build, :scheduled, pipeline: pipeline) }
it { is_expected.to be_playable }
end
context 'when build is not a manual action' do
- subject { build_stubbed(:ci_build, :success) }
+ subject { build_stubbed(:ci_build, :success, pipeline: pipeline) }
it { is_expected.not_to be_playable }
end
context 'when build is waiting for deployment approval' do
- subject { build_stubbed(:ci_build, :manual, environment: 'production') }
+ subject { build_stubbed(:ci_build, :manual, environment: 'production', pipeline: pipeline) }
before do
create(:deployment, :blocked, deployable: subject)
@@ -3767,7 +3776,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#any_unmet_prerequisites?' do
- let(:build) { create(:ci_build, :created) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
subject { build.any_unmet_prerequisites? }
@@ -3854,7 +3863,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe 'state transition: any => [:preparing]' do
- let(:build) { create(:ci_build, :created) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
before do
allow(build).to receive(:prerequisites).and_return([double])
@@ -3868,7 +3877,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe 'when the build is waiting for deployment approval' do
- let(:build) { create(:ci_build, :manual, environment: 'production') }
+ let(:build) { create(:ci_build, :manual, environment: 'production', pipeline: pipeline) }
before do
create(:deployment, :blocked, deployable: build)
@@ -3880,7 +3889,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe 'state transition: any => [:pending]' do
- let(:build) { create(:ci_build, :created) }
+ let(:build) { create(:ci_build, :created, pipeline: pipeline) }
it 'queues BuildQueueWorker' do
expect(BuildQueueWorker).to receive(:perform_async).with(build.id)
@@ -3900,8 +3909,10 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe 'state transition: pending: :running' do
- let(:runner) { create(:ci_runner) }
- let(:job) { create(:ci_build, :pending, runner: runner) }
+ let_it_be_with_reload(:runner) { create(:ci_runner) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:job) { create(:ci_build, :pending, runner: runner, pipeline: pipeline) }
before do
job.project.update_attribute(:build_timeout, 1800)
@@ -4005,7 +4016,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when artifacts of depended job has been erased' do
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago)
+ end
it { expect(job).not_to have_valid_build_dependencies }
end
@@ -4062,7 +4075,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is configured to be retried' do
- subject { create(:ci_build, :running, options: { script: ["ls -al"], retry: 3 }, project: project, user: user) }
+ subject { create(:ci_build, :running, options: { script: ["ls -al"], retry: 3 }, pipeline: pipeline, user: user) }
it 'retries build and assigns the same user to it' do
expect_next_instance_of(::Ci::RetryJobService) do |service|
@@ -4111,7 +4124,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is not configured to be retried' do
- subject { create(:ci_build, :running, project: project, user: user, pipeline: pipeline) }
+ subject { create(:ci_build, :running, pipeline: pipeline, user: user) }
let(:pipeline) do
create(:ci_pipeline,
@@ -4175,7 +4188,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '.matches_tag_ids' do
- let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
+ let_it_be(:build, reload: true) { create(:ci_build, pipeline: pipeline, user: user) }
let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
@@ -4223,7 +4236,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '.matches_tags' do
- let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
+ let_it_be(:build, reload: true) { create(:ci_build, pipeline: pipeline, user: user) }
subject { described_class.where(id: build).with_any_tags }
@@ -4249,7 +4262,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe 'pages deployments' do
- let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
+ let_it_be(:build, reload: true) { create(:ci_build, pipeline: pipeline, user: user) }
context 'when job is "pages"' do
before do
@@ -4575,7 +4588,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#artifacts_metadata_entry' do
- let_it_be(:build) { create(:ci_build, project: project) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
let(:path) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
@@ -4635,7 +4648,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#publishes_artifacts_reports?' do
- let(:build) { create(:ci_build, options: options) }
+ let(:build) { create(:ci_build, options: options, pipeline: pipeline) }
subject { build.publishes_artifacts_reports? }
@@ -4663,7 +4676,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#runner_required_feature_names' do
- let(:build) { create(:ci_build, options: options) }
+ let(:build) { create(:ci_build, options: options, pipeline: pipeline) }
subject { build.runner_required_feature_names }
@@ -4685,7 +4698,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#supported_runner?' do
- let_it_be_with_refind(:build) { create(:ci_build) }
+ let_it_be_with_refind(:build) { create(:ci_build, pipeline: pipeline) }
subject { build.supported_runner?(runner_features) }
@@ -4793,7 +4806,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is a last deployment' do
- let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline, project: project) }
+ let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
@@ -4801,7 +4814,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when there is a newer build with deployment' do
- let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline, project: project) }
+ let(:build) { create(:ci_build, :success, environment: 'production', pipeline: pipeline) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
let!(:last_deployment) { create(:deployment, :success, environment: environment, project: environment.project) }
@@ -4810,7 +4823,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build with deployment has failed' do
- let(:build) { create(:ci_build, :failed, environment: 'production', pipeline: pipeline, project: project) }
+ let(:build) { create(:ci_build, :failed, environment: 'production', pipeline: pipeline) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
@@ -4818,7 +4831,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build with deployment is running' do
- let(:build) { create(:ci_build, environment: 'production', pipeline: pipeline, project: project) }
+ let(:build) { create(:ci_build, environment: 'production', pipeline: pipeline) }
let(:environment) { create(:environment, name: 'production', project: build.project) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: environment.project, deployable: build) }
@@ -4828,13 +4841,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#degenerated?' do
context 'when build is degenerated' do
- subject { create(:ci_build, :degenerated) }
+ subject { create(:ci_build, :degenerated, pipeline: pipeline) }
it { is_expected.to be_degenerated }
end
context 'when build is valid' do
- subject { create(:ci_build) }
+ subject { create(:ci_build, pipeline: pipeline) }
it { is_expected.not_to be_degenerated }
@@ -4849,7 +4862,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe 'degenerate!' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
subject { build.degenerate! }
@@ -4869,13 +4882,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#archived?' do
context 'when build is degenerated' do
- subject { create(:ci_build, :degenerated) }
+ subject { create(:ci_build, :degenerated, pipeline: pipeline) }
it { is_expected.to be_archived }
end
context 'for old build' do
- subject { create(:ci_build, created_at: 1.day.ago) }
+ subject { create(:ci_build, created_at: 1.day.ago, pipeline: pipeline) }
context 'when archive_builds_in is set' do
before do
@@ -4896,7 +4909,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#read_metadata_attribute' do
- let(:build) { create(:ci_build, :degenerated) }
+ let(:build) { create(:ci_build, :degenerated, pipeline: pipeline) }
let(:build_options) { { key: "build" } }
let(:metadata_options) { { key: "metadata" } }
let(:default_options) { { key: "default" } }
@@ -4933,7 +4946,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#write_metadata_attribute' do
- let(:build) { create(:ci_build, :degenerated) }
+ let(:build) { create(:ci_build, :degenerated, pipeline: pipeline) }
let(:options) { { key: "new options" } }
let(:existing_options) { { key: "existing options" } }
@@ -5059,13 +5072,15 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
subject { build.environment_auto_stop_in }
context 'when build option has environment auto_stop_in' do
- let(:build) { create(:ci_build, options: { environment: { name: 'test', auto_stop_in: '1 day' } }) }
+ let(:build) do
+ create(:ci_build, options: { environment: { name: 'test', auto_stop_in: '1 day' } }, pipeline: pipeline)
+ end
it { is_expected.to eq('1 day') }
end
context 'when build option does not have environment auto_stop_in' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it { is_expected.to be_nil }
end
@@ -5385,7 +5400,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '.build_matchers' do
- let_it_be(:pipeline) { create(:ci_pipeline, :protected) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :protected, project: project) }
subject(:matchers) { pipeline.builds.build_matchers(pipeline.project) }
@@ -5434,7 +5449,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#build_matcher' do
let_it_be(:build) do
- build_stubbed(:ci_build, tag_list: %w[tag1 tag2])
+ build_stubbed(:ci_build, tag_list: %w[tag1 tag2], pipeline: pipeline)
end
subject(:matcher) { build.build_matcher }
@@ -5570,7 +5585,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
it 'does not generate cross DB queries when a record is created via FactoryBot' do
with_cross_database_modification_prevented do
- create(:ci_build)
+ create(:ci_build, pipeline: pipeline)
end
end
@@ -5598,7 +5613,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:model) { create(:ci_build, user: create(:user)) }
+ let!(:model) { create(:ci_build, user: create(:user), pipeline: pipeline) }
let!(:parent) { model.user }
end
@@ -5608,7 +5623,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'when given new job variables' do
context 'when the cloned build has an action' do
it 'applies the new job variables' do
- build = create(:ci_build, :actionable)
+ build = create(:ci_build, :actionable, pipeline: pipeline)
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
create(:ci_job_variable, job: build, key: 'OLD_KEY', value: 'i will not live for long')
@@ -5627,7 +5642,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'when the cloned build does not have an action' do
it 'applies the old job variables' do
- build = create(:ci_build)
+ build = create(:ci_build, pipeline: pipeline)
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
new_build = build.clone(
@@ -5645,7 +5660,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
context 'when not given new job variables' do
it 'applies the old job variables' do
- build = create(:ci_build)
+ build = create(:ci_build, pipeline: pipeline)
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
new_build = build.clone(current_user: user)
@@ -5659,14 +5674,14 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
describe '#test_suite_name' do
- let(:build) { create(:ci_build, name: 'test') }
+ let(:build) { create(:ci_build, name: 'test', pipeline: pipeline) }
it 'uses the group name for test suite name' do
expect(build.test_suite_name).to eq('test')
end
context 'when build is part of parallel build' do
- let(:build) { create(:ci_build, name: 'build 1/2') }
+ let(:build) { create(:ci_build, name: 'build 1/2', pipeline: pipeline) }
it 'uses the group name for test suite name' do
expect(build.test_suite_name).to eq('build')
@@ -5674,7 +5689,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
context 'when build is part of matrix build' do
- let!(:matrix_build) { create(:ci_build, :matrix) }
+ let!(:matrix_build) { create(:ci_build, :matrix, pipeline: pipeline) }
it 'uses the job name for the test suite' do
expect(matrix_build.test_suite_name).to eq(matrix_build.name)
@@ -5685,7 +5700,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe '#runtime_hooks' do
let(:build1) do
FactoryBot.build(:ci_build,
- options: { hooks: { pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] } })
+ options: { hooks: { pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] } },
+ pipeline: pipeline)
end
subject(:runtime_hooks) { build1.runtime_hooks }
@@ -5700,7 +5716,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe 'partitioning', :ci_partitionable do
include Ci::PartitioningHelpers
- let(:new_pipeline) { create(:ci_pipeline) }
+ let(:new_pipeline) { create(:ci_pipeline, project: project) }
let(:ci_build) { FactoryBot.build(:ci_build, pipeline: new_pipeline) }
before do
@@ -5724,7 +5740,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
describe 'assigning token', :ci_partitionable do
include Ci::PartitioningHelpers
- let(:new_pipeline) { create(:ci_pipeline) }
+ let(:new_pipeline) { create(:ci_pipeline, project: project) }
let(:ci_build) { create(:ci_build, pipeline: new_pipeline) }
before do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 681f1b57a1a..65dc1a71829 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1931,7 +1931,7 @@ RSpec.describe Ci::Runner, feature_category: :runner do
end
end
- describe '#with_upgrade_status' do
+ describe '.with_upgrade_status' do
subject { described_class.with_upgrade_status(upgrade_status) }
let_it_be(:runner_14_0_0) { create(:ci_runner, version: '14.0.0') }
@@ -1975,4 +1975,20 @@ RSpec.describe Ci::Runner, feature_category: :runner do
end
end
end
+
+ describe '#created_via_ui?' do
+ subject(:created_via_ui) { runner.created_via_ui? }
+
+ context 'when runner registered from command line' do
+ let(:runner) { create(:ci_runner) }
+
+ it { is_expected.to eq false }
+ end
+
+ context 'when runner created via UI' do
+ let(:runner) { create(:ci_runner, :created_in_ui) }
+
+ it { is_expected.to eq true }
+ end
+ end
end
diff --git a/spec/policies/ci/runner_policy_spec.rb b/spec/policies/ci/runner_policy_spec.rb
index 6039d60ec2f..e0a9e3c2870 100644
--- a/spec/policies/ci/runner_policy_spec.rb
+++ b/spec/policies/ci/runner_policy_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
+ let_it_be(:owner) { create(:user) }
+
describe 'ability :read_runner' do
let_it_be(:guest) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:owner) { create(:user) }
let_it_be_with_reload(:group) { create(:group, name: 'top-level', path: 'top-level') }
let_it_be_with_reload(:subgroup) { create(:group, name: 'subgroup', path: 'subgroup', parent: group) }
@@ -170,4 +171,24 @@ RSpec.describe Ci::RunnerPolicy, feature_category: :runner do
end
end
end
+
+ describe 'ability :read_ephemeral_token' do
+ subject(:policy) { described_class.new(user, runner) }
+
+ let_it_be(:runner) { create(:ci_runner, creator: owner) }
+
+ let(:creator) { owner }
+
+ context 'with request made by creator' do
+ let(:user) { creator }
+
+ it { expect_allowed :read_ephemeral_token }
+ end
+
+ context 'with request made by another user' do
+ let(:user) { create(:admin) }
+
+ it { expect_disallowed :read_ephemeral_token }
+ end
+ end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 1d40d8291b5..6c5d6b0010d 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -92,6 +92,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
run_untagged: runner.run_untagged,
ip_address: runner.ip_address,
runner_type: runner.instance_type? ? 'INSTANCE_TYPE' : 'PROJECT_TYPE',
+ ephemeral_authentication_token: nil,
executor_name: runner.executor_type&.dasherize,
architecture_name: runner.architecture,
platform_name: runner.platform,
@@ -518,6 +519,108 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
end
end
+ describe 'ephemeralAuthenticationToken', :freeze_time do
+ subject(:request) { post_graphql(query, current_user: user) }
+
+ let_it_be(:creator) { create(:user) }
+
+ let(:created_at) { Time.current }
+ let(:token_prefix) { '' }
+ let(:query) do
+ %(
+ query {
+ runner(id: "#{runner.to_global_id}") {
+ id
+ ephemeralAuthenticationToken
+ }
+ }
+ )
+ end
+
+ let(:runner) do
+ create(:ci_runner, :group,
+ groups: [group], creator: creator, created_at: created_at, token: "#{token_prefix}abc123")
+ end
+
+ before_all do
+ group.add_owner(creator) # Allow creating runners in the group
+ end
+
+ shared_examples 'an ephemeral_authentication_token' do
+ it 'returns token in ephemeral_authentication_token field' do
+ request
+
+ runner_data = graphql_data_at(:runner)
+ expect(runner_data).not_to be_nil
+ expect(runner_data).to match a_graphql_entity_for(runner, ephemeral_authentication_token: runner.token)
+ end
+ end
+
+ shared_examples 'a protected ephemeral_authentication_token' do
+ it 'returns nil ephemeral_authentication_token' do
+ request
+
+ runner_data = graphql_data_at(:runner)
+ expect(runner_data).not_to be_nil
+ expect(runner_data).to match a_graphql_entity_for(runner, ephemeral_authentication_token: nil)
+ end
+ end
+
+ context 'with request made by creator' do
+ let(:user) { creator }
+
+ context 'with runner created in UI' do
+ let(:token_prefix) { ::Ci::Runner::CREATED_RUNNER_TOKEN_PREFIX }
+
+ context 'with runner created in last 3 hours' do
+ let(:created_at) { (3.hours - 1.second).ago }
+
+ context 'with no runner machine registered yet' do
+ it_behaves_like 'an ephemeral_authentication_token'
+ end
+
+ context 'with first runner machine already registered' do
+ let!(:runner_machine) { create(:ci_runner_machine, runner: runner) }
+
+ it_behaves_like 'a protected ephemeral_authentication_token'
+ end
+ end
+
+ context 'with runner created almost too long ago' do
+ let(:created_at) { (3.hours - 1.second).ago }
+
+ it_behaves_like 'an ephemeral_authentication_token'
+ end
+
+ context 'with runner created too long ago' do
+ let(:created_at) { 3.hours.ago }
+
+ it_behaves_like 'a protected ephemeral_authentication_token'
+ end
+ end
+
+ context 'with runner registered from command line' do
+ let(:token_prefix) { '' }
+
+ context 'with runner created in last 3 hours' do
+ let(:created_at) { (3.hours - 1.second).ago }
+
+ it_behaves_like 'a protected ephemeral_authentication_token'
+ end
+ end
+ end
+
+ context 'when request is made by non-creator of the runner' do
+ let(:user) { create(:admin) }
+
+ context 'with runner created in UI' do
+ let(:token_prefix) { ::Ci::Runner::CREATED_RUNNER_TOKEN_PREFIX }
+
+ it_behaves_like 'a protected ephemeral_authentication_token'
+ end
+ end
+ end
+
describe 'Query limits' do
def runner_query(runner)
<<~SINGLE
diff --git a/spec/scripts/failed_tests_spec.rb b/spec/scripts/failed_tests_spec.rb
index b99fd991c55..ce0ec66cdb6 100644
--- a/spec/scripts/failed_tests_spec.rb
+++ b/spec/scripts/failed_tests_spec.rb
@@ -5,121 +5,113 @@ require_relative '../../scripts/failed_tests'
RSpec.describe FailedTests do
let(:report_file) { 'spec/fixtures/scripts/test_report.json' }
- let(:output_directory) { 'tmp/previous_test_results' }
- let(:rspec_pg_regex) { /rspec .+ pg12( .+)?/ }
- let(:rspec_ee_pg_regex) { /rspec-ee .+ pg12( .+)?/ }
-
- subject { described_class.new(previous_tests_report_path: report_file, output_directory: output_directory, rspec_pg_regex: rspec_pg_regex, rspec_ee_pg_regex: rspec_ee_pg_regex) }
-
- describe '#output_failed_test_files' do
- it 'writes the file for the suite' do
- expect(File).to receive(:open).with(File.join(output_directory, "rspec_failed_files.txt"), 'w').once
-
- subject.output_failed_test_files
- end
- end
-
- describe '#failed_files_for_suite_collection' do
- let(:failure_path) { 'path/to/fail_file_spec.rb' }
- let(:other_failure_path) { 'path/to/fail_file_spec_2.rb' }
- let(:file_contents_as_json) do
- {
- 'suites' => [
- {
- 'failed_count' => 1,
- 'name' => 'rspec unit pg12 10/12',
- 'test_cases' => [
- {
- 'status' => 'failed',
- 'file' => failure_path
- }
- ]
- },
- {
- 'failed_count' => 1,
- 'name' => 'rspec-ee unit pg12',
- 'test_cases' => [
- {
- 'status' => 'failed',
- 'file' => failure_path
- }
- ]
- },
- {
- 'failed_count' => 1,
- 'name' => 'rspec unit pg13 10/12',
- 'test_cases' => [
- {
- 'status' => 'failed',
- 'file' => other_failure_path
- }
- ]
- }
- ]
- }
- end
-
- before do
- allow(subject).to receive(:file_contents_as_json).and_return(file_contents_as_json)
- end
-
- it 'returns a list of failed file paths for suite collection' do
- result = subject.failed_files_for_suite_collection
-
- expect(result[:rspec].to_a).to match_array(failure_path)
- expect(result[:rspec_ee].to_a).to match_array(failure_path)
- end
+ let(:options) { described_class::DEFAULT_OPTIONS.merge(previous_tests_report_path: report_file) }
+ let(:failure_path) { 'path/to/fail_file_spec.rb' }
+ let(:other_failure_path) { 'path/to/fail_file_spec_2.rb' }
+ let(:file_contents_as_json) do
+ {
+ 'suites' => [
+ {
+ 'failed_count' => 1,
+ 'name' => 'rspec unit pg12 10/12',
+ 'test_cases' => [
+ {
+ 'status' => 'failed',
+ 'file' => failure_path
+ }
+ ]
+ },
+ {
+ 'failed_count' => 1,
+ 'name' => 'rspec-ee unit pg12',
+ 'test_cases' => [
+ {
+ 'status' => 'failed',
+ 'file' => failure_path
+ }
+ ]
+ },
+ {
+ 'failed_count' => 1,
+ 'name' => 'rspec unit pg13 10/12',
+ 'test_cases' => [
+ {
+ 'status' => 'failed',
+ 'file' => other_failure_path
+ }
+ ]
+ }
+ ]
+ }
end
- describe 'empty report' do
- let(:file_content) do
- '{}'
- end
-
- before do
- allow(subject).to receive(:file_contents).and_return(file_content)
- end
-
- it 'does not fail for output files' do
- subject.output_failed_test_files
- end
-
- it 'returns empty results for suite failures' do
- result = subject.failed_files_for_suite_collection
-
- expect(result.values.flatten).to be_empty
- end
- end
-
- describe 'invalid report' do
- let(:file_content) do
- ''
- end
-
- before do
- allow(subject).to receive(:file_contents).and_return(file_content)
- end
-
- it 'does not fail for output files' do
- subject.output_failed_test_files
- end
-
- it 'returns empty results for suite failures' do
- result = subject.failed_files_for_suite_collection
-
- expect(result.values.flatten).to be_empty
+ subject { described_class.new(options) }
+
+ describe '#output_failed_tests' do
+ context 'with a valid report file' do
+ before do
+ allow(subject).to receive(:file_contents_as_json).and_return(file_contents_as_json)
+ end
+
+ it 'writes the file for the suite' do
+ expect(File).to receive(:open)
+ .with(File.join(described_class::DEFAULT_OPTIONS[:output_directory], "rspec_failed_tests.txt"), 'w').once
+ expect(File).to receive(:open)
+ .with(File.join(described_class::DEFAULT_OPTIONS[:output_directory], "rspec_ee_failed_tests.txt"), 'w').once
+
+ subject.output_failed_tests
+ end
+
+ context 'when given a valid format' do
+ subject { described_class.new(options.merge(format: :json)) }
+
+ it 'writes the file for the suite' do
+ expect(File).to receive(:open)
+ .with(File.join(described_class::DEFAULT_OPTIONS[:output_directory], "rspec_failed_tests.json"), 'w').once
+ expect(File).to receive(:open)
+ .with(File.join(described_class::DEFAULT_OPTIONS[:output_directory], "rspec_ee_failed_tests.json"), 'w')
+ .once
+
+ subject.output_failed_tests
+ end
+ end
+
+ context 'when given an invalid format' do
+ subject { described_class.new(options.merge(format: :foo)) }
+
+ it 'raises an exception' do
+ expect { subject.output_failed_tests }
+ .to raise_error '[FailedTests] Unsupported format `foo` (allowed formats: `oneline` and `json`)!'
+ end
+ end
+
+ describe 'empty report' do
+ let(:file_contents_as_json) do
+ {}
+ end
+
+ it 'does not fail for output files' do
+ subject.output_failed_tests
+ end
+
+ it 'returns empty results for suite failures' do
+ result = subject.failed_cases_for_suite_collection
+
+ expect(result.values.flatten).to be_empty
+ end
+ end
end
end
describe 'missing report file' do
- let(:report_file) { 'unknownfile.json' }
+ subject { described_class.new(options.merge(previous_tests_report_path: 'unknownfile.json')) }
it 'does not fail for output files' do
- subject.output_failed_test_files
+ subject.output_failed_tests
end
it 'returns empty results for suite failures' do
- result = subject.failed_files_for_suite_collection
+ result = subject.failed_cases_for_suite_collection
expect(result.values.flatten).to be_empty
end
diff --git a/spec/scripts/pipeline_test_report_builder_spec.rb b/spec/scripts/pipeline_test_report_builder_spec.rb
index b51b4dc4887..e7529eb0d41 100644
--- a/spec/scripts/pipeline_test_report_builder_spec.rb
+++ b/spec/scripts/pipeline_test_report_builder_spec.rb
@@ -3,12 +3,11 @@
require 'fast_spec_helper'
require_relative '../../scripts/pipeline_test_report_builder'
-RSpec.describe PipelineTestReportBuilder do
+RSpec.describe PipelineTestReportBuilder, feature_category: :tooling do
let(:report_file) { 'spec/fixtures/scripts/test_report.json' }
let(:output_file_path) { 'tmp/previous_test_results/output_file.json' }
-
- subject do
- described_class.new(
+ let(:options) do
+ described_class::DEFAULT_OPTIONS.merge(
target_project: 'gitlab-org/gitlab',
mr_id: '999',
instance_base_url: 'https://gitlab.com',
@@ -16,25 +15,27 @@ RSpec.describe PipelineTestReportBuilder do
)
end
- let(:failed_pipeline_url) { 'pipeline2_url' }
+ let(:previous_pipeline_url) { '/pipelines/previous' }
- let(:failed_pipeline) do
+ let(:previous_pipeline) do
{
'status' => 'failed',
- 'created_at' => (DateTime.now - 5).to_s,
- 'web_url' => failed_pipeline_url
+ 'id' => 1,
+ 'web_url' => previous_pipeline_url
}
end
- let(:current_pipeline) do
+ let(:latest_pipeline_url) { '/pipelines/latest' }
+
+ let(:latest_pipeline) do
{
'status' => 'running',
- 'created_at' => DateTime.now.to_s,
- 'web_url' => 'pipeline1_url'
+ 'id' => 3,
+ 'web_url' => latest_pipeline_url
}
end
- let(:mr_pipelines) { [current_pipeline, failed_pipeline] }
+ let(:mr_pipelines) { [latest_pipeline, previous_pipeline] }
let(:failed_build_id) { 9999 }
@@ -68,6 +69,8 @@ RSpec.describe PipelineTestReportBuilder do
}
end
+ subject { described_class.new(options) }
+
before do
allow(subject).to receive(:pipelines_for_mr).and_return(mr_pipelines)
allow(subject).to receive(:failed_builds_for_pipeline).and_return(failed_builds_for_pipeline)
@@ -78,7 +81,7 @@ RSpec.describe PipelineTestReportBuilder do
let(:fork_pipeline) do
{
'status' => 'failed',
- 'created_at' => (DateTime.now - 5).to_s,
+ 'id' => 2,
'web_url' => fork_pipeline_url
}
end
@@ -88,7 +91,7 @@ RSpec.describe PipelineTestReportBuilder do
end
context 'pipeline in a fork project' do
- let(:mr_pipelines) { [current_pipeline, fork_pipeline] }
+ let(:mr_pipelines) { [latest_pipeline, fork_pipeline] }
it 'returns fork pipeline' do
expect(subject.previous_pipeline).to eq(fork_pipeline)
@@ -97,125 +100,105 @@ RSpec.describe PipelineTestReportBuilder do
context 'pipeline in target project' do
it 'returns failed pipeline' do
- expect(subject.previous_pipeline).to eq(failed_pipeline)
+ expect(subject.previous_pipeline).to eq(previous_pipeline)
end
end
end
- describe '#test_report_for_latest_pipeline' do
- let(:failed_build_uri) { "#{failed_pipeline_url}/tests/suite.json?build_ids[]=#{failed_build_id}" }
-
- before do
- allow(subject).to receive(:fetch).with(failed_build_uri).and_return(failed_builds_for_pipeline)
- end
-
- it 'fetches builds from pipeline related to MR' do
- expected = { "suites" => [failed_builds_for_pipeline] }.to_json
- expect(subject.test_report_for_latest_pipeline).to eq(expected)
- end
-
- context 'canonical pipeline' do
- context 'no previous pipeline' do
- let(:mr_pipelines) { [] }
+ describe '#test_report_for_pipeline' do
+ context 'for previous pipeline' do
+ let(:failed_build_uri) { "#{previous_pipeline_url}/tests/suite.json?build_ids[]=#{failed_build_id}" }
- it 'returns empty hash' do
- expect(subject.test_report_for_latest_pipeline).to eq("{}")
- end
+ before do
+ allow(subject).to receive(:fetch).with(failed_build_uri).and_return(test_report_for_build)
end
- context 'first pipeline scenario' do
- let(:mr_pipelines) do
- [
- {
- 'status' => 'running',
- 'created_at' => DateTime.now.to_s
- }
- ]
- end
-
- it 'returns empty hash' do
- expect(subject.test_report_for_latest_pipeline).to eq("{}")
- end
+ it 'fetches builds from pipeline related to MR' do
+ expected = { "suites" => [test_report_for_build.merge('job_url' => "/jobs/#{failed_build_id}")] }.to_json
+ expect(subject.test_report_for_pipeline).to eq(expected)
end
- context 'no previous failed pipeline' do
- let(:mr_pipelines) do
- [
- {
- 'status' => 'running',
- 'created_at' => DateTime.now.to_s
- },
- {
- 'status' => 'success',
- 'created_at' => (DateTime.now - 5).to_s
- }
- ]
- end
+ context 'canonical pipeline' do
+ context 'no previous pipeline' do
+ let(:mr_pipelines) { [] }
- it 'returns empty hash' do
- expect(subject.test_report_for_latest_pipeline).to eq("{}")
+ it 'returns empty hash' do
+ expect(subject.test_report_for_pipeline).to eq("{}")
+ end
end
- end
- context 'no failed test builds' do
- let(:failed_builds_for_pipeline) do
- [
- {
- 'id' => 9999,
- 'stage' => 'prepare'
- }
- ]
- end
+ context 'no failed test builds' do
+ let(:failed_builds_for_pipeline) do
+ [
+ {
+ 'id' => 9999,
+ 'stage' => 'prepare'
+ }
+ ]
+ end
- it 'returns empty hash' do
- expect(subject.test_report_for_latest_pipeline).to eq("{}")
+ it 'returns a hash with an empty "suites" array' do
+ expect(subject.test_report_for_pipeline).to eq({ suites: [] }.to_json)
+ end
end
- end
- context 'failed pipeline and failed test builds' do
- before do
- allow(subject).to receive(:fetch).with(failed_build_uri).and_return(test_report_for_build)
- end
+ context 'failed pipeline and failed test builds' do
+ before do
+ allow(subject).to receive(:fetch).with(failed_build_uri).and_return(test_report_for_build)
+ end
- it 'returns populated test list for suites' do
- actual = subject.test_report_for_latest_pipeline
- expected = {
- 'suites' => [test_report_for_build]
- }.to_json
+ it 'returns populated test list for suites' do
+ actual = subject.test_report_for_pipeline
+ expected = {
+ 'suites' => [test_report_for_build]
+ }.to_json
- expect(actual).to eq(expected)
+ expect(actual).to eq(expected)
+ end
end
- end
- context 'when receiving a server error' do
- let(:response) { instance_double('Net::HTTPResponse') }
- let(:error) { Net::HTTPServerException.new('server error', response) }
- let(:test_report_for_latest_pipeline) { subject.test_report_for_latest_pipeline }
+ context 'when receiving a server error' do
+ let(:response) { instance_double('Net::HTTPResponse') }
+ let(:error) { Net::HTTPServerException.new('server error', response) }
+ let(:test_report_for_pipeline) { subject.test_report_for_pipeline }
- before do
- allow(response).to receive(:code).and_return(response_code)
- allow(subject).to receive(:fetch).with(failed_build_uri).and_raise(error)
- end
+ before do
+ allow(response).to receive(:code).and_return(response_code)
+ allow(subject).to receive(:fetch).with(failed_build_uri).and_raise(error)
+ end
- context 'when response code is 404' do
- let(:response_code) { 404 }
+ context 'when response code is 404' do
+ let(:response_code) { 404 }
- it 'continues without the missing reports' do
- expected = { 'suites' => [] }.to_json
+ it 'continues without the missing reports' do
+ expected = { suites: [] }.to_json
- expect { test_report_for_latest_pipeline }.not_to raise_error
- expect(test_report_for_latest_pipeline).to eq(expected)
+ expect { test_report_for_pipeline }.not_to raise_error
+ expect(test_report_for_pipeline).to eq(expected)
+ end
end
- end
- context 'when response code is unexpected' do
- let(:response_code) { 500 }
+ context 'when response code is unexpected' do
+ let(:response_code) { 500 }
- it 'raises HTTPServerException' do
- expect { test_report_for_latest_pipeline }.to raise_error(error)
+ it 'raises HTTPServerException' do
+ expect { test_report_for_pipeline }.to raise_error(error)
+ end
end
end
end
end
+
+ context 'for latest pipeline' do
+ let(:failed_build_uri) { "#{latest_pipeline_url}/tests/suite.json?build_ids[]=#{failed_build_id}" }
+
+ subject { described_class.new(options.merge(pipeline_index: :latest)) }
+
+ it 'fetches builds from pipeline related to MR' do
+ expect(subject).to receive(:fetch).with(failed_build_uri).and_return(test_report_for_build)
+
+ subject.test_report_for_pipeline
+ end
+ end
end
end
diff --git a/spec/tooling/lib/tooling/mappings/base_spec.rb b/spec/tooling/lib/tooling/mappings/base_spec.rb
new file mode 100644
index 00000000000..935f833fa8b
--- /dev/null
+++ b/spec/tooling/lib/tooling/mappings/base_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require_relative '../../../../../tooling/lib/tooling/mappings/view_to_js_mappings'
+
+RSpec.describe Tooling::Mappings::Base, feature_category: :tooling do
+ describe '#folders_for_available_editions' do
+ let(:base_folder_path) { 'app/views' }
+
+ subject { described_class.new.folders_for_available_editions(base_folder_path) }
+
+ context 'when FOSS' do
+ before do
+ allow(GitlabEdition).to receive(:ee?).and_return(false)
+ allow(GitlabEdition).to receive(:jh?).and_return(false)
+ end
+
+ it 'returns the correct paths' do
+ expect(subject).to match_array([base_folder_path])
+ end
+ end
+
+ context 'when EE' do
+ before do
+ allow(GitlabEdition).to receive(:ee?).and_return(true)
+ allow(GitlabEdition).to receive(:jh?).and_return(false)
+ end
+
+ it 'returns the correct paths' do
+ expect(subject).to match_array([base_folder_path, "ee/#{base_folder_path}"])
+ end
+ end
+
+ context 'when JiHu' do
+ before do
+ allow(GitlabEdition).to receive(:ee?).and_return(true)
+ allow(GitlabEdition).to receive(:jh?).and_return(true)
+ end
+
+ it 'returns the correct paths' do
+ expect(subject).to match_array([base_folder_path, "ee/#{base_folder_path}", "jh/#{base_folder_path}"])
+ end
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb
new file mode 100644
index 00000000000..72e02547938
--- /dev/null
+++ b/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'tempfile'
+require_relative '../../../../../tooling/lib/tooling/mappings/js_to_system_specs_mappings'
+
+RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :tooling do
+ # We set temporary folders, and those readers give access to those folder paths
+ attr_accessor :js_base_folder, :system_specs_base_folder
+
+ around do |example|
+ Dir.mktmpdir do |tmp_js_base_folder|
+ Dir.mktmpdir do |tmp_system_specs_base_folder|
+ self.system_specs_base_folder = tmp_system_specs_base_folder
+ self.js_base_folder = tmp_js_base_folder
+
+ example.run
+ end
+ end
+ end
+
+ describe '#execute' do
+ let(:instance) do
+ described_class.new(
+ system_specs_base_folder: system_specs_base_folder,
+ js_base_folder: js_base_folder
+ )
+ end
+
+ subject { instance.execute(changed_files) }
+
+ context 'when no JS files were changed' do
+ let(:changed_files) { [] }
+
+ it 'returns nothing' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when some JS files were changed' do
+ let(:changed_files) { ["#{js_base_folder}/issues/secret_values.js"] }
+
+ context 'when the JS files are not present on disk' do
+ it 'returns nothing' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when the JS files are present on disk' do
+ before do
+ FileUtils.mkdir_p("#{js_base_folder}/issues")
+ File.write("#{js_base_folder}/issues/secret_values.js", "hello")
+ end
+
+ context 'when no system specs match the JS keyword' do
+ it 'returns nothing' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when a system spec matches the JS keyword' do
+ before do
+ FileUtils.mkdir_p("#{system_specs_base_folder}/confidential_issues")
+ File.write("#{system_specs_base_folder}/confidential_issues/issues_spec.rb", "a test")
+ end
+
+ it 'returns something' do
+ expect(subject).to match_array(["#{system_specs_base_folder}/confidential_issues/issues_spec.rb"])
+ end
+ end
+ end
+ end
+ end
+
+ describe '#filter_files' do
+ subject { described_class.new(js_base_folder: js_base_folder).filter_files(changed_files) }
+
+ before do
+ File.write("#{js_base_folder}/index.js", "index.js")
+ File.write("#{js_base_folder}/index-with-ee-in-it.js", "index-with-ee-in-it.js")
+ File.write("#{js_base_folder}/index-with-jh-in-it.js", "index-with-jh-in-it.js")
+ end
+
+ context 'when no files were changed' do
+ let(:changed_files) { [] }
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when JS files were changed' do
+ let(:changed_files) do
+ [
+ "#{js_base_folder}/index.js",
+ "#{js_base_folder}/index-with-ee-in-it.js",
+ "#{js_base_folder}/index-with-jh-in-it.js"
+ ]
+ end
+
+ it 'returns the path to the JS files' do
+ # "nil" group represents FOSS JS files in app/assets/javascripts
+ expect(subject).to match(nil => [
+ "#{js_base_folder}/index.js",
+ "#{js_base_folder}/index-with-ee-in-it.js",
+ "#{js_base_folder}/index-with-jh-in-it.js"
+ ])
+ end
+ end
+
+ context 'when JS files are deleted' do
+ let(:changed_files) { ["#{system_specs_base_folder}/deleted.html"] }
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+ end
+
+ describe '#construct_js_keywords' do
+ subject { described_class.new.construct_js_keywords(js_files) }
+
+ let(:js_files) do
+ %w[
+ app/assets/javascripts/boards/issue_board_filters.js
+ ee/app/assets/javascripts/queries/epic_due_date.query.graphql
+ ]
+ end
+
+ it 'returns a singularized keyword based on the first folder the file is in' do
+ expect(subject).to eq(%w[board query])
+ end
+ end
+
+ describe '#system_specs_for_edition' do
+ subject do
+ described_class.new(system_specs_base_folder: system_specs_base_folder).system_specs_for_edition(edition)
+ end
+
+ context 'when FOSS' do
+ let(:edition) { nil }
+
+ it 'checks the correct folder' do
+ expect(Dir).to receive(:[]).with("#{system_specs_base_folder}/**/*").and_call_original
+
+ subject
+ end
+ end
+
+ context 'when EE' do
+ let(:edition) { 'ee' }
+
+ it 'checks the correct folder' do
+ expect(Dir).to receive(:[]).with("ee#{system_specs_base_folder}/**/*").and_call_original
+
+ subject
+ end
+ end
+
+ context 'when JiHu' do
+ let(:edition) { 'jh' }
+
+ it 'checks the correct folder' do
+ expect(Dir).to receive(:[]).with("jh#{system_specs_base_folder}/**/*").and_call_original
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/tooling/lib/tooling/view_to_js_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb
index b09df2a9200..eaa0124370d 100644
--- a/spec/tooling/lib/tooling/view_to_js_mappings_spec.rb
+++ b/spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
require 'tempfile'
-require_relative '../../../../tooling/lib/tooling/view_to_js_mappings'
+require_relative '../../../../../tooling/lib/tooling/mappings/view_to_js_mappings'
-RSpec.describe Tooling::ViewToJsMappings, feature_category: :tooling do
+RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling do
# We set temporary folders, and those readers give access to those folder paths
attr_accessor :view_base_folder, :js_base_folder
@@ -32,7 +32,7 @@ RSpec.describe Tooling::ViewToJsMappings, feature_category: :tooling do
context 'when no view files have been changed' do
before do
- allow(instance).to receive(:view_files).and_return([])
+ allow(instance).to receive(:filter_files).and_return([])
end
it 'returns nothing' do
@@ -140,8 +140,8 @@ RSpec.describe Tooling::ViewToJsMappings, feature_category: :tooling do
end
end
- describe '#view_files' do
- subject { described_class.new(view_base_folder: view_base_folder).view_files(changed_files) }
+ describe '#filter_files' do
+ subject { described_class.new(view_base_folder: view_base_folder).filter_files(changed_files) }
before do
File.write("#{js_base_folder}/index.js", "index.js")
@@ -181,45 +181,6 @@ RSpec.describe Tooling::ViewToJsMappings, feature_category: :tooling do
end
end
- describe '#folders_for_available_editions' do
- let(:base_folder_path) { 'app/views' }
-
- subject { described_class.new.folders_for_available_editions(base_folder_path) }
-
- context 'when FOSS' do
- before do
- allow(GitlabEdition).to receive(:ee?).and_return(false)
- allow(GitlabEdition).to receive(:jh?).and_return(false)
- end
-
- it 'returns the correct paths' do
- expect(subject).to match_array([base_folder_path])
- end
- end
-
- context 'when EE' do
- before do
- allow(GitlabEdition).to receive(:ee?).and_return(true)
- allow(GitlabEdition).to receive(:jh?).and_return(false)
- end
-
- it 'returns the correct paths' do
- expect(subject).to eq([base_folder_path, "ee/#{base_folder_path}"])
- end
- end
-
- context 'when JiHu' do
- before do
- allow(GitlabEdition).to receive(:ee?).and_return(true)
- allow(GitlabEdition).to receive(:jh?).and_return(true)
- end
-
- it 'returns the correct paths' do
- expect(subject).to eq([base_folder_path, "ee/#{base_folder_path}", "jh/#{base_folder_path}"])
- end
- end
- end
-
describe '#find_partials' do
subject { described_class.new(view_base_folder: view_base_folder).find_partials(file_path) }
diff --git a/tooling/bin/js_to_system_specs_mappings b/tooling/bin/js_to_system_specs_mappings
new file mode 100755
index 00000000000..59a66ab0691
--- /dev/null
+++ b/tooling/bin/js_to_system_specs_mappings
@@ -0,0 +1,10 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require_relative '../lib/tooling/mappings/js_to_system_specs_mappings'
+
+changes = ARGV.shift
+output_file = ARGV.shift
+changed_files = File.read(changes).split(' ')
+
+File.write(output_file, Tooling::Mappings::JsToSystemSpecsMappings.new.execute(changed_files).join(' '))
diff --git a/tooling/bin/view_to_js_mappings b/tooling/bin/view_to_js_mappings
index 2cebb91892e..483003aac5e 100755
--- a/tooling/bin/view_to_js_mappings
+++ b/tooling/bin/view_to_js_mappings
@@ -1,10 +1,10 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
-require_relative '../lib/tooling/view_to_js_mappings'
+require_relative '../lib/tooling/mappings/view_to_js_mappings'
changes = ARGV.shift
output_file = ARGV.shift
changed_files = File.read(changes).split(' ')
-File.write(output_file, Tooling::ViewToJsMappings.new.execute(changed_files).join(' '))
+File.write(output_file, Tooling::Mappings::ViewToJsMappings.new.execute(changed_files).join(' '))
diff --git a/tooling/lib/tooling/mappings/base.rb b/tooling/lib/tooling/mappings/base.rb
new file mode 100644
index 00000000000..93d3a967114
--- /dev/null
+++ b/tooling/lib/tooling/mappings/base.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require_relative '../../../../lib/gitlab_edition'
+
+# Base class for mappings that find spec files related to the files changed in the MR.
+module Tooling
+ module Mappings
+ class Base
+ # Input: A list of space-separated files
+ # Output: A list of space-separated specs files (JS, Ruby, ...)
+ def execute(changed_files)
+ raise "Not Implemented"
+ end
+
+ # Input: A list of space-separated files
+ # Output: array/hash of files
+ def filter_files(changed_files)
+ raise "Not Implemented"
+ end
+
+ # Input: A folder
+ # Output: An array of folders, each prefixed with a GitLab edition
+ def folders_for_available_editions(base_folder)
+ foss_prefix = base_folder
+ extension_prefixes = ::GitlabEdition.extensions.map { |prefix| "#{prefix}/#{foss_prefix}" }
+ [foss_prefix, *extension_prefixes]
+ end
+ end
+ end
+end
diff --git a/tooling/lib/tooling/mappings/js_to_system_specs_mappings.rb b/tooling/lib/tooling/mappings/js_to_system_specs_mappings.rb
new file mode 100644
index 00000000000..365e466011b
--- /dev/null
+++ b/tooling/lib/tooling/mappings/js_to_system_specs_mappings.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'active_support/inflector'
+
+require_relative 'base'
+require_relative '../../../../lib/gitlab_edition'
+
+# Returns system specs files that are related to the JS files that were changed in the MR.
+module Tooling
+ module Mappings
+ class JsToSystemSpecsMappings < Base
+ def initialize(js_base_folder: 'app/assets/javascripts', system_specs_base_folder: 'spec/features')
+ @js_base_folder = js_base_folder
+ @js_base_folders = folders_for_available_editions(js_base_folder)
+ @system_specs_base_folder = system_specs_base_folder
+
+ # Cannot be extracted to a constant, as it depends on a variable
+ @first_js_folder_extract_regexp = %r{
+ (?:.*/)? # Skips the GitLab edition (e.g. ee/, jh/)
+ #{@js_base_folder}/ # Most likely app/assets/javascripts/
+ ([\w-]*) # Captures the first folder
+ }x
+ end
+
+ def execute(changed_files)
+ filter_files(changed_files).flat_map do |edition, js_files|
+ js_keywords_regexp = Regexp.union(construct_js_keywords(js_files))
+
+ system_specs_for_edition(edition).select do |system_spec_file|
+ system_spec_file if js_keywords_regexp.match?(system_spec_file)
+ end
+ end
+ end
+
+ # Keep the files that are in the @js_base_folders folders
+ #
+ # Returns a hash, where the key is the GitLab edition, and the values the JS specs
+ def filter_files(changed_files)
+ selected_files = changed_files.select do |filename|
+ filename.start_with?(*@js_base_folders) && File.exist?(filename)
+ end
+
+ selected_files.group_by { |filename| filename[/^#{Regexp.union(::GitlabEdition.extensions)}/] }
+ end
+
+ # Extract keywords in the JS filenames to be used for searching matching system specs
+ def construct_js_keywords(js_files)
+ js_files.map do |js_file|
+ filename = js_file.scan(@first_js_folder_extract_regexp).flatten.first
+ filename.singularize
+ end.uniq
+ end
+
+ def system_specs_for_edition(edition)
+ all_files_in_folders_glob = File.join(@system_specs_base_folder, '**', '*')
+ all_files_in_folders_glob = File.join(edition, all_files_in_folders_glob) if edition
+ Dir[all_files_in_folders_glob].select { |f| File.file?(f) }
+ end
+ end
+ end
+end
diff --git a/tooling/lib/tooling/mappings/view_to_js_mappings.rb b/tooling/lib/tooling/mappings/view_to_js_mappings.rb
new file mode 100644
index 00000000000..db80eb9bfe8
--- /dev/null
+++ b/tooling/lib/tooling/mappings/view_to_js_mappings.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require_relative 'base'
+require_relative '../../../../lib/gitlab_edition'
+
+# Returns JS files that are related to the Rails views files that were changed in the MR.
+module Tooling
+ module Mappings
+ class ViewToJsMappings < Base
+ # The HTML attribute value pattern we're looking for to match an HTML file to a JS file.
+ HTML_ATTRIBUTE_VALUE_REGEXP = /js-[-\w]+/.freeze
+
+ # Search for Rails partials included in an HTML file
+ RAILS_PARTIAL_INVOCATION_REGEXP = %r{(?:render|render_if_exist)(?: |\()(?:partial: ?)?['"]([\w/-]+)['"]}.freeze
+
+ def initialize(view_base_folder: 'app/views', js_base_folder: 'app/assets/javascripts')
+ @view_base_folders = folders_for_available_editions(view_base_folder)
+ @js_base_folders = folders_for_available_editions(js_base_folder)
+ end
+
+ def execute(changed_files)
+ changed_view_files = filter_files(changed_files)
+
+ partials = changed_view_files.flat_map do |file|
+ find_partials(file)
+ end
+
+ files_to_scan = changed_view_files + partials
+ js_tags = files_to_scan.flat_map do |file|
+ find_pattern_in_file(file, HTML_ATTRIBUTE_VALUE_REGEXP)
+ end
+ js_tags_regexp = Regexp.union(js_tags)
+
+ @js_base_folders.flat_map do |js_base_folder|
+ Dir["#{js_base_folder}/**/*.{js,vue}"].select do |js_file|
+ file_content = File.read(js_file)
+ js_tags_regexp.match?(file_content)
+ end
+ end
+ end
+
+ # Keep the files that are in the @view_base_folders folder
+ def filter_files(changed_files)
+ changed_files.select do |filename|
+ filename.start_with?(*@view_base_folders) &&
+ File.exist?(filename)
+ end
+ end
+
+ # Note: We only search for partials with depth 1. We don't do recursive search, as
+ # it is probably not necessary for a first iteration.
+ def find_partials(file)
+ partial_paths = find_pattern_in_file(file, RAILS_PARTIAL_INVOCATION_REGEXP)
+ partial_paths.flat_map do |partial_path|
+ view_file_folder = File.dirname(file)
+ partial_relative_folder = File.dirname(partial_path)
+
+ dirname =
+ if partial_relative_folder == '.' # The partial is in the same folder as the HTML file
+ view_file_folder
+ else
+ File.join(view_file_folder, partial_relative_folder)
+ end
+
+ Dir["#{dirname}/_#{File.basename(partial_path)}.*"]
+ end
+ end
+
+ def find_pattern_in_file(file, pattern)
+ File.read(file).scan(pattern).flatten.uniq
+ end
+ end
+ end
+end
diff --git a/tooling/lib/tooling/view_to_js_mappings.rb b/tooling/lib/tooling/view_to_js_mappings.rb
deleted file mode 100644
index 76704a04469..00000000000
--- a/tooling/lib/tooling/view_to_js_mappings.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require_relative '../../../lib/gitlab_edition'
-
-# Returns JS files that are related to the Rails views files that were changed in the MR.
-module Tooling
- class ViewToJsMappings
- # The HTML attribute value pattern we're looking for to match an HTML file to a JS file.
- HTML_ATTRIBUTE_VALUE_REGEXP = /js-[-\w]+/.freeze
-
- # Search for Rails partials included in an HTML file
- RAILS_PARTIAL_INVOCATION_REGEXP = %r{(?:render|render_if_exist)(?: |\()(?:partial: ?)?['"]([\w/-]+)['"]}.freeze
-
- def initialize(view_base_folder: 'app/views', js_base_folder: 'app/assets/javascripts')
- @view_base_folders = folders_for_available_editions(view_base_folder)
- @js_base_folders = folders_for_available_editions(js_base_folder)
- end
-
- def execute(changed_files)
- changed_view_files = view_files(changed_files)
-
- partials = changed_view_files.flat_map do |file|
- find_partials(file)
- end
-
- files_to_scan = changed_view_files + partials
- js_tags = files_to_scan.flat_map do |file|
- find_pattern_in_file(file, HTML_ATTRIBUTE_VALUE_REGEXP)
- end
- js_tags_regexp = Regexp.union(js_tags)
-
- @js_base_folders.flat_map do |js_base_folder|
- Dir["#{js_base_folder}/**/*.{js,vue}"].select do |js_file|
- file_content = File.read(js_file)
- js_tags_regexp.match?(file_content)
- end
- end
- end
-
- # Keep the files that are in the @view_base_folders folder
- def view_files(changed_files)
- changed_files.select do |filename|
- filename.start_with?(*@view_base_folders) &&
- File.exist?(filename)
- end
- end
-
- def folders_for_available_editions(base_folder)
- foss_prefix = base_folder
- extension_prefixes = ::GitlabEdition.extensions.map { |prefix| "#{prefix}/#{foss_prefix}" }
- [foss_prefix, *extension_prefixes]
- end
-
- # Note: We only search for partials with depth 1. We don't do recursive search, as
- # it is probably not necessary for a first iteration.
- def find_partials(file)
- partial_paths = find_pattern_in_file(file, RAILS_PARTIAL_INVOCATION_REGEXP)
- partial_paths.flat_map do |partial_path|
- view_file_folder = File.dirname(file)
- partial_relative_folder = File.dirname(partial_path)
-
- dirname =
- if partial_relative_folder == '.' # The partial is in the same folder as the HTML file
- view_file_folder
- else
- File.join(view_file_folder, partial_relative_folder)
- end
-
- Dir["#{dirname}/_#{File.basename(partial_path)}.*"]
- end
- end
-
- def find_pattern_in_file(file, pattern)
- File.read(file).scan(pattern).flatten.uniq
- end
- end
-end
diff --git a/workhorse/.tool-versions b/workhorse/.tool-versions
index ee9584ceddc..18a7cdea814 100644
--- a/workhorse/.tool-versions
+++ b/workhorse/.tool-versions
@@ -1 +1 @@
-golang 1.18.7
+golang 1.18.9