-rw-r--r--  .gitlab/ci/review.gitlab-ci.yml | 12
-rw-r--r--  .gitlab/ci/rules.gitlab-ci.yml | 4
-rw-r--r--  .rubocop_todo.yml | 20
-rw-r--r--  app/assets/javascripts/api.js | 7
-rw-r--r--  app/assets/javascripts/packages_and_registries/dependency_proxy/app.vue | 103
-rw-r--r--  app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue | 4
-rw-r--r--  app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_status.vue | 74
-rw-r--r--  app/assets/javascripts/projects/commit_box/info/constants.js | 8
-rw-r--r--  app/assets/javascripts/projects/commit_box/info/graphql/queries/get_latest_pipeline_status.query.graphql | 14
-rw-r--r--  app/assets/javascripts/projects/commit_box/info/index.js | 3
-rw-r--r--  app/assets/javascripts/projects/commit_box/info/init_commit_pipeline_status.js | 34
-rw-r--r--  app/assets/javascripts/reports/components/report_section.vue | 2
-rw-r--r--  app/assets/stylesheets/page_bundles/jira_connect_users.scss | 14
-rw-r--r--  app/controllers/projects/merge_requests_controller.rb | 3
-rw-r--r--  app/helpers/external_link_helper.rb | 2
-rw-r--r--  app/models/ci/job_artifact.rb | 5
-rw-r--r--  app/presenters/merge_request_presenter.rb | 14
-rw-r--r--  app/serializers/merge_request_poll_widget_entity.rb | 14
-rw-r--r--  app/services/ci/job_artifacts/update_unknown_locked_status_service.rb | 79
-rw-r--r--  app/views/groups/dependency_proxies/show.html.haml | 2
-rw-r--r--  app/views/jira_connect/users/show.html.haml | 14
-rw-r--r--  app/views/projects/commit/_commit_box.html.haml | 4
-rw-r--r--  app/workers/all_queues.yml | 9
-rw-r--r--  app/workers/ci/update_locked_unknown_artifacts_worker.rb | 26
-rw-r--r--  config/feature_flags/development/change_response_code_merge_status.yml | 8
-rw-r--r--  config/feature_flags/development/ci_job_artifacts_backlog_large_loop_limit.yml | 8
-rw-r--r--  config/feature_flags/development/ci_job_artifacts_backlog_work.yml | 8
-rw-r--r--  config/feature_flags/development/distribute_github_parallel_import.yml | 8
-rw-r--r--  config/feature_flags/development/users_expanding_widgets_usage_data.yml | 8
-rw-r--r--  config/initializers/1_settings.rb | 3
-rw-r--r--  config/metrics/counts_28d/20210216175055_merge_requests.yml | 3
-rw-r--r--  config/metrics/counts_all/20210216175045_merge_requests.yml | 3
-rw-r--r--  doc/api/merge_requests.md | 2
-rw-r--r--  doc/ci/jobs/index.md | 16
-rw-r--r--  doc/development/database/loose_foreign_keys.md | 419
-rw-r--r--  lib/api/merge_requests.rb | 6
-rw-r--r--  lib/gitlab/github_import/parallel_scheduling.rb | 6
-rw-r--r--  lib/gitlab/usage_data.rb | 1
-rw-r--r--  lib/gitlab/usage_data_counters/known_events/common.yml | 4
-rw-r--r--  lib/system_check/base_check.rb | 12
-rw-r--r--  locale/gitlab.pot | 27
-rw-r--r--  package.json | 2
-rw-r--r--  spec/features/projects/import_export/import_file_spec.rb | 6
-rw-r--r--  spec/features/task_lists_spec.rb | 16
-rw-r--r--  spec/frontend/api_spec.js | 12
-rw-r--r--  spec/frontend/commit/components/commit_box_pipeline_status_spec.js | 150
-rw-r--r--  spec/frontend/commit/mock_data.js | 20
-rw-r--r--  spec/frontend/packages_and_registries/dependency_proxy/app_spec.js | 64
-rw-r--r--  spec/initializers/mail_encoding_patch_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/github_import/importer/issues_importer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/github_import/importer/notes_importer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/github_import/parallel_scheduling_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 2
-rw-r--r--  spec/models/board_spec.rb | 4
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 9
-rw-r--r--  spec/models/project_spec.rb | 4
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 30
-rw-r--r--  spec/serializers/merge_request_poll_widget_entity_spec.rb | 36
-rw-r--r--  spec/services/ci/job_artifacts/update_unknown_locked_status_service_spec.rb | 145
-rw-r--r--  spec/support/shared_contexts/url_shared_context.rb | 4
-rw-r--r--  spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb | 2
-rw-r--r--  spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb | 2
-rw-r--r--  spec/validators/addressable_url_validator_spec.rb | 4
-rw-r--r--  spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb | 44
-rw-r--r--  yarn.lock | 8
67 files changed, 1451 insertions, 181 deletions
diff --git a/.gitlab/ci/review.gitlab-ci.yml b/.gitlab/ci/review.gitlab-ci.yml
index c2aa84494fc..13f1541f002 100644
--- a/.gitlab/ci/review.gitlab-ci.yml
+++ b/.gitlab/ci/review.gitlab-ci.yml
@@ -1,8 +1,3 @@
-include:
- - project: gitlab-org/modelops/applied-ml/review-recommender/ci-templates
- ref: v0.2.1
- file: recommender/Reviewers.gitlab-ci.yml
-
review-cleanup:
extends:
- .default-retry
@@ -70,10 +65,3 @@ danger-review-local:
- .review:rules:danger-local
script:
- run_timed_command danger_as_local
-
-reviewers-recommender:
- extends:
- - .default-retry
- - .review:rules:reviewers-recommender
- stage: test
- needs: []
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml
index 17cb6778a5f..e808a0297a6 100644
--- a/.gitlab/ci/rules.gitlab-ci.yml
+++ b/.gitlab/ci/rules.gitlab-ci.yml
@@ -1637,10 +1637,6 @@
- <<: *if-merge-request
changes: *danger-patterns
-.review:rules:reviewers-recommender:
- rules:
- - <<: *if-merge-request
-
###############
# Setup rules #
###############
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml
index 534861d90b2..cc4992020ae 100644
--- a/.rubocop_todo.yml
+++ b/.rubocop_todo.yml
@@ -194,19 +194,6 @@ RSpec/ReturnFromStub:
RSpec/ScatteredLet:
Enabled: false
-# Offense count: 24
-# Configuration parameters: EnforcedStyle, IgnoredPatterns.
-# SupportedStyles: snake_case, camelCase
-RSpec/VariableName:
- Exclude:
- - 'spec/features/projects/import_export/import_file_spec.rb'
- - 'spec/features/task_lists_spec.rb'
- - 'spec/initializers/mail_encoding_patch_spec.rb'
- - 'spec/models/board_spec.rb'
- - 'spec/support/shared_contexts/url_shared_context.rb'
- - 'spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb'
- - 'spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb'
-
# Offense count: 26
# Cop supports --auto-correct.
# Configuration parameters: Include.
@@ -343,13 +330,6 @@ Style/AccessorGrouping:
Style/BarePercentLiterals:
Enabled: false
-# Offense count: 6
-# Cop supports --auto-correct.
-Style/BisectedAttrAccessor:
- Exclude:
- - 'lib/system_check/base_check.rb'
- - 'qa/qa/resource/api_fabricator.rb'
-
# Offense count: 42
# Cop supports --auto-correct.
Style/CaseLikeIf:
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 33d7da8fd53..d2856d99ef0 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -93,6 +93,7 @@ const Api = {
notificationSettingsPath: '/api/:version/notification_settings',
deployKeysPath: '/api/:version/deploy_keys',
secureFilesPath: '/api/:version/projects/:project_id/secure_files',
+ dependencyProxyPath: '/api/:version/groups/:id/dependency_proxy/cache',
group(groupId, callback = () => {}) {
const url = Api.buildUrl(Api.groupPath).replace(':id', groupId);
@@ -999,6 +1000,12 @@ const Api = {
return result;
},
+
+ deleteDependencyProxyCacheList(groupId, options = {}) {
+ const url = Api.buildUrl(this.dependencyProxyPath).replace(':id', groupId);
+
+ return axios.delete(url, { params: { ...options } });
+ },
};
export default Api;
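The new `deleteDependencyProxyCacheList` helper issues a `DELETE` against the `dependencyProxyPath` route registered above. As a rough, hedged sketch of the same request made directly against the REST API — the host `gitlab.example.com`, group ID `42`, and the `GITLAB_TOKEN` variable are all placeholders:

```ruby
require 'net/http'
require 'uri'

# Placeholder host and group ID; the path mirrors Api.dependencyProxyPath above.
uri = URI('https://gitlab.example.com/api/v4/groups/42/dependency_proxy/cache')

request = Net::HTTP::Delete.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # assumed to hold a sufficiently privileged token

Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
  response = http.request(request)
  puts response.code # the purge is processed asynchronously, so a 2xx "accepted" style response is expected
end
```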
diff --git a/app/assets/javascripts/packages_and_registries/dependency_proxy/app.vue b/app/assets/javascripts/packages_and_registries/dependency_proxy/app.vue
index eb112238c11..4ddefcbdf6a 100644
--- a/app/assets/javascripts/packages_and_registries/dependency_proxy/app.vue
+++ b/app/assets/javascripts/packages_and_registries/dependency_proxy/app.vue
@@ -1,13 +1,18 @@
<script>
import {
GlAlert,
+ GlDropdown,
+ GlDropdownItem,
GlEmptyState,
GlFormGroup,
GlFormInputGroup,
+ GlModal,
+ GlModalDirective,
GlSkeletonLoader,
GlSprintf,
} from '@gitlab/ui';
-import { s__ } from '~/locale';
+import { __, s__, n__, sprintf } from '~/locale';
+import Api from '~/api';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import ManifestsList from '~/packages_and_registries/dependency_proxy/components/manifests_list.vue';
@@ -22,16 +27,22 @@ import getDependencyProxyDetailsQuery from '~/packages_and_registries/dependency
export default {
components: {
GlAlert,
+ GlDropdown,
+ GlDropdownItem,
GlEmptyState,
GlFormGroup,
GlFormInputGroup,
+ GlModal,
GlSkeletonLoader,
GlSprintf,
ClipboardButton,
TitleArea,
ManifestsList,
},
- inject: ['groupPath', 'dependencyProxyAvailable', 'noManifestsIllustration'],
+ directives: {
+ GlModalDirective,
+ },
+ inject: ['groupPath', 'groupId', 'dependencyProxyAvailable', 'noManifestsIllustration'],
i18n: {
proxyNotAvailableText: s__(
'DependencyProxy|Dependency Proxy feature is limited to public groups for now.',
@@ -41,6 +52,19 @@ export default {
blobCountAndSize: s__('DependencyProxy|Contains %{count} blobs of images (%{size})'),
pageTitle: s__('DependencyProxy|Dependency Proxy'),
noManifestTitle: s__('DependencyProxy|There are no images in the cache'),
+ deleteCacheAlertMessageSuccess: s__(
+ 'DependencyProxy|All items in the cache are scheduled for removal.',
+ ),
+ },
+ confirmClearCacheModal: 'confirm-clear-cache-modal',
+ modalButtons: {
+ primary: {
+ text: s__('DependencyProxy|Clear cache'),
+ attributes: [{ variant: 'danger' }],
+ },
+ secondary: {
+ text: __('Cancel'),
+ },
},
links: {
DEPENDENCY_PROXY_DOCS_PATH,
@@ -48,6 +72,8 @@ export default {
data() {
return {
group: {},
+ showDeleteCacheAlert: false,
+ deleteCacheAlertMessage: '',
};
},
apollo: {
@@ -80,6 +106,33 @@ export default {
manifests() {
return this.group.dependencyProxyManifests.nodes;
},
+ modalTitleWithCount() {
+ return sprintf(
+ n__(
+ 'Clear %{count} image from cache?',
+ 'Clear %{count} images from cache?',
+ this.group.dependencyProxyBlobCount,
+ ),
+ {
+ count: this.group.dependencyProxyBlobCount,
+ },
+ );
+ },
+ modalConfirmationMessageWithCount() {
+ return sprintf(
+ n__(
+ 'You are about to clear %{count} image from the cache. Once you confirm, the next time a pipeline runs it must pull an image or tag from Docker Hub. Are you sure?',
+ 'You are about to clear %{count} images from the cache. Once you confirm, the next time a pipeline runs it must pull an image or tag from Docker Hub. Are you sure?',
+ this.group.dependencyProxyBlobCount,
+ ),
+ {
+ count: this.group.dependencyProxyBlobCount,
+ },
+ );
+ },
+ showDeleteDropdown() {
+ return this.group.dependencyProxyBlobCount > 0;
+ },
},
methods: {
fetchNextPage() {
@@ -103,13 +156,47 @@ export default {
},
});
},
+ async submit() {
+ try {
+ await Api.deleteDependencyProxyCacheList(this.groupId);
+
+ this.deleteCacheAlertMessage = this.$options.i18n.deleteCacheAlertMessageSuccess;
+ this.showDeleteCacheAlert = true;
+ } catch (err) {
+ this.deleteCacheAlertMessage = err;
+ this.showDeleteCacheAlert = true;
+ }
+ },
},
};
</script>
<template>
<div>
- <title-area :title="$options.i18n.pageTitle" :info-messages="infoMessages" />
+ <gl-alert
+ v-if="showDeleteCacheAlert"
+ data-testid="delete-cache-alert"
+ @dismiss="showDeleteCacheAlert = false"
+ >
+ {{ deleteCacheAlertMessage }}
+ </gl-alert>
+ <title-area :title="$options.i18n.pageTitle" :info-messages="infoMessages">
+ <template v-if="showDeleteDropdown" #right-actions>
+ <gl-dropdown
+ icon="ellipsis_v"
+ text="More actions"
+ :text-sr-only="true"
+ category="tertiary"
+ no-caret
+ >
+ <gl-dropdown-item
+ v-gl-modal-directive="$options.confirmClearCacheModal"
+ variant="danger"
+ >{{ $options.i18n.clearCache }}</gl-dropdown-item
+ >
+ </gl-dropdown>
+ </template>
+ </title-area>
<gl-alert
v-if="!dependencyProxyAvailable"
:dismissible="false"
@@ -159,5 +246,15 @@ export default {
:title="$options.i18n.noManifestTitle"
/>
</div>
+
+ <gl-modal
+ :modal-id="$options.confirmClearCacheModal"
+ :title="modalTitleWithCount"
+ :action-primary="$options.modalButtons.primary"
+ :action-secondary="$options.modalButtons.secondary"
+ @primary="submit"
+ >
+ {{ modalConfirmationMessageWithCount }}
+ </gl-modal>
</div>
</template>
diff --git a/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue b/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue
index b996d3f15d1..8511f9bdb0f 100644
--- a/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue
+++ b/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue
@@ -10,7 +10,7 @@ import {
import { formatStages } from '../utils';
import getLinkedPipelinesQuery from '../graphql/queries/get_linked_pipelines.query.graphql';
import getPipelineStagesQuery from '../graphql/queries/get_pipeline_stages.query.graphql';
-import { PIPELINE_STAGES_POLL_INTERVAL } from '../constants';
+import { COMMIT_BOX_POLL_INTERVAL } from '../constants';
export default {
i18n: {
@@ -65,7 +65,7 @@ export default {
return getQueryHeaders(this.graphqlResourceEtag);
},
query: getPipelineStagesQuery,
- pollInterval: PIPELINE_STAGES_POLL_INTERVAL,
+ pollInterval: COMMIT_BOX_POLL_INTERVAL,
variables() {
return {
fullPath: this.fullPath,
diff --git a/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_status.vue b/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_status.vue
new file mode 100644
index 00000000000..5a9d3129809
--- /dev/null
+++ b/app/assets/javascripts/projects/commit_box/info/components/commit_box_pipeline_status.vue
@@ -0,0 +1,74 @@
+<script>
+import { GlLoadingIcon, GlLink } from '@gitlab/ui';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import createFlash from '~/flash';
+import {
+ getQueryHeaders,
+ toggleQueryPollingByVisibility,
+} from '~/pipelines/components/graph/utils';
+import getLatestPipelineStatusQuery from '../graphql/queries/get_latest_pipeline_status.query.graphql';
+import { COMMIT_BOX_POLL_INTERVAL, PIPELINE_STATUS_FETCH_ERROR } from '../constants';
+
+export default {
+ PIPELINE_STATUS_FETCH_ERROR,
+ components: {
+ CiIcon,
+ GlLoadingIcon,
+ GlLink,
+ },
+ inject: {
+ fullPath: {
+ default: '',
+ },
+ iid: {
+ default: '',
+ },
+ graphqlResourceEtag: {
+ default: '',
+ },
+ },
+ apollo: {
+ pipelineStatus: {
+ context() {
+ return getQueryHeaders(this.graphqlResourceEtag);
+ },
+ query: getLatestPipelineStatusQuery,
+ pollInterval: COMMIT_BOX_POLL_INTERVAL,
+ variables() {
+ return {
+ fullPath: this.fullPath,
+ iid: this.iid,
+ };
+ },
+ update({ project }) {
+ return project?.pipeline?.detailedStatus || {};
+ },
+ error() {
+ createFlash({ message: this.$options.PIPELINE_STATUS_FETCH_ERROR });
+ },
+ },
+ },
+ data() {
+ return {
+ pipelineStatus: {},
+ };
+ },
+ computed: {
+ loading() {
+ return this.$apollo.queries.pipelineStatus.loading;
+ },
+ },
+ mounted() {
+ toggleQueryPollingByVisibility(this.$apollo.queries.pipelineStatus);
+ },
+};
+</script>
+
+<template>
+ <div class="gl-display-inline-block gl-vertical-align-middle gl-mr-2">
+ <gl-loading-icon v-if="loading" />
+ <gl-link v-else :href="pipelineStatus.detailsPath">
+ <ci-icon :status="pipelineStatus" :size="24" />
+ </gl-link>
+ </div>
+</template>
diff --git a/app/assets/javascripts/projects/commit_box/info/constants.js b/app/assets/javascripts/projects/commit_box/info/constants.js
index fc4f03482e2..be0bf715314 100644
--- a/app/assets/javascripts/projects/commit_box/info/constants.js
+++ b/app/assets/javascripts/projects/commit_box/info/constants.js
@@ -1 +1,7 @@
-export const PIPELINE_STAGES_POLL_INTERVAL = 10000;
+import { __ } from '~/locale';
+
+export const COMMIT_BOX_POLL_INTERVAL = 10000;
+
+export const PIPELINE_STATUS_FETCH_ERROR = __(
+ 'There was a problem fetching the latest pipeline status.',
+);
diff --git a/app/assets/javascripts/projects/commit_box/info/graphql/queries/get_latest_pipeline_status.query.graphql b/app/assets/javascripts/projects/commit_box/info/graphql/queries/get_latest_pipeline_status.query.graphql
new file mode 100644
index 00000000000..cec96f82336
--- /dev/null
+++ b/app/assets/javascripts/projects/commit_box/info/graphql/queries/get_latest_pipeline_status.query.graphql
@@ -0,0 +1,14 @@
+query getLatestPipelineStatus($fullPath: ID!, $iid: ID!) {
+ project(fullPath: $fullPath) {
+ id
+ pipeline(iid: $iid) {
+ id
+ detailedStatus {
+ id
+ detailsPath
+ icon
+ group
+ }
+ }
+ }
+}
diff --git a/app/assets/javascripts/projects/commit_box/info/index.js b/app/assets/javascripts/projects/commit_box/info/index.js
index 69fe2d30489..7500c152b6a 100644
--- a/app/assets/javascripts/projects/commit_box/info/index.js
+++ b/app/assets/javascripts/projects/commit_box/info/index.js
@@ -2,6 +2,7 @@ import { fetchCommitMergeRequests } from '~/commit_merge_requests';
import { initCommitPipelineMiniGraph } from './init_commit_pipeline_mini_graph';
import { initDetailsButton } from './init_details_button';
import { loadBranches } from './load_branches';
+import initCommitPipelineStatus from './init_commit_pipeline_status';
export const initCommitBoxInfo = () => {
// Display commit related branches
@@ -14,4 +15,6 @@ export const initCommitBoxInfo = () => {
initCommitPipelineMiniGraph();
initDetailsButton();
+
+ initCommitPipelineStatus();
};
diff --git a/app/assets/javascripts/projects/commit_box/info/init_commit_pipeline_status.js b/app/assets/javascripts/projects/commit_box/info/init_commit_pipeline_status.js
new file mode 100644
index 00000000000..d5e62531283
--- /dev/null
+++ b/app/assets/javascripts/projects/commit_box/info/init_commit_pipeline_status.js
@@ -0,0 +1,34 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createDefaultClient from '~/lib/graphql';
+import CommitBoxPipelineStatus from './components/commit_box_pipeline_status.vue';
+
+Vue.use(VueApollo);
+
+const apolloProvider = new VueApollo({
+ defaultClient: createDefaultClient({}, { useGet: true }),
+});
+
+export default (selector = '.js-commit-pipeline-status') => {
+ const el = document.querySelector(selector);
+
+ if (!el) {
+ return;
+ }
+
+ const { fullPath, iid, graphqlResourceEtag } = el.dataset;
+
+ // eslint-disable-next-line no-new
+ new Vue({
+ el,
+ apolloProvider,
+ provide: {
+ fullPath,
+ iid,
+ graphqlResourceEtag,
+ },
+ render(createElement) {
+ return createElement(CommitBoxPipelineStatus);
+ },
+ });
+};
diff --git a/app/assets/javascripts/reports/components/report_section.vue b/app/assets/javascripts/reports/components/report_section.vue
index 7a490210f0b..ae201a61db6 100644
--- a/app/assets/javascripts/reports/components/report_section.vue
+++ b/app/assets/javascripts/reports/components/report_section.vue
@@ -172,7 +172,7 @@ export default {
},
methods: {
toggleCollapsed() {
- if (this.trackAction && this.glFeatures.usersExpandingWidgetsUsageData) {
+ if (this.trackAction) {
api.trackRedisHllUserEvent(this.trackAction);
}
diff --git a/app/assets/stylesheets/page_bundles/jira_connect_users.scss b/app/assets/stylesheets/page_bundles/jira_connect_users.scss
index 6725bf8f1a1..602910adad9 100644
--- a/app/assets/stylesheets/page_bundles/jira_connect_users.scss
+++ b/app/assets/stylesheets/page_bundles/jira_connect_users.scss
@@ -1,13 +1 @@
-@import 'mixins_and_variables_and_functions';
-
-.jira-connect-users-container {
- margin-left: auto;
- margin-right: auto;
- width: px-to-rem(350px);
-}
-
-.devise-layout-html body .navless-container {
- @include media-breakpoint-down(xs) {
- padding-top: 65px;
- }
-}
+@import '../themes/theme_indigo';
diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb
index 7baf876e792..3858873bb46 100644
--- a/app/controllers/projects/merge_requests_controller.rb
+++ b/app/controllers/projects/merge_requests_controller.rb
@@ -44,8 +44,9 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:secure_vulnerability_training, project, default_enabled: :yaml)
push_frontend_feature_flag(:issue_assignees_widget, @project, default_enabled: :yaml)
push_frontend_feature_flag(:realtime_labels, project, default_enabled: :yaml)
+
# Usage data feature flags
- push_frontend_feature_flag(:users_expanding_widgets_usage_data, project, default_enabled: :yaml)
+ #
push_frontend_feature_flag(:diff_settings_usage_data, default_enabled: :yaml)
end
diff --git a/app/helpers/external_link_helper.rb b/app/helpers/external_link_helper.rb
index c951d0daf96..53dacfe0566 100644
--- a/app/helpers/external_link_helper.rb
+++ b/app/helpers/external_link_helper.rb
@@ -5,7 +5,7 @@ module ExternalLinkHelper
def external_link(body, url, options = {})
link = link_to url, { target: '_blank', rel: 'noopener noreferrer' }.merge(options) do
- "#{body}#{sprite_icon('external-link', css_class: 'gl-ml-1')}".html_safe
+ "#{body}#{sprite_icon('external-link', css_class: 'gl-ml-2')}".html_safe
end
sanitize(link, tags: %w(a svg use), attributes: %w(target rel data-testid class href).concat(options.stringify_keys.keys))
end
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 3426c4d5248..dff8bb89021 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -186,6 +186,7 @@ module Ci
scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }
scope :unlocked, -> { joins(job: :pipeline).merge(::Ci::Pipeline.unlocked) }
+ scope :order_expired_asc, -> { order(expire_at: :asc) }
scope :order_expired_desc, -> { order(expire_at: :desc) }
scope :with_destroy_preloads, -> { includes(project: [:route, :statistics]) }
@@ -273,6 +274,10 @@ module Ci
self.where(project: project).sum(:size)
end
+ def self.pluck_job_id
+ pluck(:job_id)
+ end
+
##
# FastDestroyAll concerns
# rubocop: disable CodeReuse/ServiceClass
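The `order_expired_asc` scope and `pluck_job_id` helper added here are consumed by the new `UpdateUnknownLockedStatusService` later in this diff. A minimal, hedged console sketch of how they compose with the scopes that service uses (`expired_before` and `artifact_unknown`), with the batch size mirroring its `BATCH_SIZE`:

```ruby
# Rough sketch for a GitLab Rails console; assumes job artifact records exist.
cutoff = Time.current

unknown_status_build_ids = Ci::JobArtifact
  .expired_before(cutoff) # expire_at earlier than the cutoff
  .artifact_unknown       # locked status not yet resolved
  .limit(100)             # BATCH_SIZE in the new service
  .order_expired_asc      # scope added in this change
  .pluck_job_id           # helper added in this change
  .uniq
```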
diff --git a/app/presenters/merge_request_presenter.rb b/app/presenters/merge_request_presenter.rb
index 6dd3908b21d..907bd37f37b 100644
--- a/app/presenters/merge_request_presenter.rb
+++ b/app/presenters/merge_request_presenter.rb
@@ -191,13 +191,17 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
end
def mergeable_discussions_state
- # This avoids calling MergeRequest#mergeable_discussions_state without
- # considering the state of the MR first. If a MR isn't mergeable, we can
- # safely short-circuit it.
- if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true)
+ if Feature.enabled?(:change_response_code_merge_status, project, default_enabled: :yaml)
merge_request.mergeable_discussions_state?
else
- false
+ # This avoids calling MergeRequest#mergeable_discussions_state without
+ # considering the state of the MR first. If a MR isn't mergeable, we can
+ # safely short-circuit it.
+ if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true)
+ merge_request.mergeable_discussions_state?
+ else
+ false
+ end
end
end
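Both this presenter and the poll widget entity below gate the new behaviour on the same `change_response_code_merge_status` flag, which is added later in this diff and disabled by default. A hedged sketch of an actor-scoped rollout from a Rails console, where the project path is a placeholder:

```ruby
# Enable the flag for a single project while testing the new response behaviour.
project = Project.find_by_full_path('group/project') # placeholder path
Feature.enable(:change_response_code_merge_status, project)
```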
diff --git a/app/serializers/merge_request_poll_widget_entity.rb b/app/serializers/merge_request_poll_widget_entity.rb
index 12998d70a22..5c37337ab09 100644
--- a/app/serializers/merge_request_poll_widget_entity.rb
+++ b/app/serializers/merge_request_poll_widget_entity.rb
@@ -33,13 +33,17 @@ class MergeRequestPollWidgetEntity < Grape::Entity
# Booleans
expose :mergeable_discussions_state?, as: :mergeable_discussions_state do |merge_request|
- # This avoids calling MergeRequest#mergeable_discussions_state without
- # considering the state of the MR first. If a MR isn't mergeable, we can
- # safely short-circuit it.
- if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true)
+ if Feature.enabled?(:change_response_code_merge_status, merge_request.project, default_enabled: :yaml)
merge_request.mergeable_discussions_state?
else
- false
+ # This avoids calling MergeRequest#mergeable_discussions_state without
+ # considering the state of the MR first. If a MR isn't mergeable, we can
+ # safely short-circuit it.
+ if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true)
+ merge_request.mergeable_discussions_state?
+ else
+ false
+ end
end
end
diff --git a/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb b/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb
new file mode 100644
index 00000000000..0d35a90ed04
--- /dev/null
+++ b/app/services/ci/job_artifacts/update_unknown_locked_status_service.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class UpdateUnknownLockedStatusService
+ include ::Gitlab::ExclusiveLeaseHelpers
+ include ::Gitlab::LoopHelpers
+
+ BATCH_SIZE = 100
+ LOOP_TIMEOUT = 5.minutes
+ LOOP_LIMIT = 100
+ LARGE_LOOP_LIMIT = 500
+ EXCLUSIVE_LOCK_KEY = 'unknown_status_job_artifacts:update:lock'
+ LOCK_TIMEOUT = 6.minutes
+
+ def initialize
+ @removed_count = 0
+ @locked_count = 0
+ @start_at = Time.current
+ @loop_limit = Feature.enabled?(:ci_job_artifacts_backlog_large_loop_limit) ? LARGE_LOOP_LIMIT : LOOP_LIMIT
+ end
+
+ def execute
+ in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
+ update_locked_status_on_unknown_artifacts
+ end
+
+ { removed: @removed_count, locked: @locked_count }
+ end
+
+ private
+
+ def update_locked_status_on_unknown_artifacts
+ loop_until(timeout: LOOP_TIMEOUT, limit: @loop_limit) do
+ unknown_status_build_ids = safely_ordered_ci_job_artifacts_locked_unknown_relation.pluck_job_id.uniq
+
+ locked_pipe_build_ids = ::Ci::Build
+ .with_pipeline_locked_artifacts
+ .id_in(unknown_status_build_ids)
+ .pluck_primary_key
+
+ @locked_count += update_unknown_artifacts(locked_pipe_build_ids, Ci::JobArtifact.lockeds[:artifacts_locked])
+
+ unlocked_pipe_build_ids = unknown_status_build_ids - locked_pipe_build_ids
+ service_response = batch_destroy_artifacts(unlocked_pipe_build_ids)
+ @removed_count += service_response[:destroyed_artifacts_count]
+ end
+ end
+
+ def update_unknown_artifacts(build_ids, locked_value)
+ return 0 unless build_ids.any?
+
+ expired_locked_unknown_artifacts.for_job_ids(build_ids).update_all(locked: locked_value)
+ end
+
+ def batch_destroy_artifacts(build_ids)
+ deleteable_artifacts_relation =
+ if build_ids.any?
+ expired_locked_unknown_artifacts.for_job_ids(build_ids)
+ else
+ Ci::JobArtifact.none
+ end
+
+ Ci::JobArtifacts::DestroyBatchService.new(deleteable_artifacts_relation).execute
+ end
+
+ def expired_locked_unknown_artifacts
+ # UPDATE queries perform better without the specific order and limit
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/76509#note_891260455
+ Ci::JobArtifact.expired_before(@start_at).artifact_unknown
+ end
+
+ def safely_ordered_ci_job_artifacts_locked_unknown_relation
+ # Adding the ORDER and LIMIT improves performance when we don't have build_id
+ expired_locked_unknown_artifacts.limit(BATCH_SIZE).order_expired_asc
+ end
+ end
+ end
+end
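The service's `execute` returns the counts hash built above, which the new worker logs via `log_extra_metadata_on_done`. A minimal, hedged sketch of invoking it from a Rails console:

```ruby
# Runs one locking/cleanup pass over artifacts whose locked status is unknown.
result = Ci::JobArtifacts::UpdateUnknownLockedStatusService.new.execute

result[:removed] # artifacts destroyed because their pipelines turned out to be unlocked
result[:locked]  # artifacts updated to the artifacts_locked status
```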
diff --git a/app/views/groups/dependency_proxies/show.html.haml b/app/views/groups/dependency_proxies/show.html.haml
index 47caec717af..940a504438d 100644
--- a/app/views/groups/dependency_proxies/show.html.haml
+++ b/app/views/groups/dependency_proxies/show.html.haml
@@ -4,4 +4,4 @@
#js-dependency-proxy{ data: { group_path: @group.full_path,
dependency_proxy_available: dependency_proxy_available.to_s,
- no_manifests_illustration: image_path('illustrations/docker-empty-state.svg') } }
+ no_manifests_illustration: image_path('illustrations/docker-empty-state.svg'), group_id: @group.id } }
diff --git a/app/views/jira_connect/users/show.html.haml b/app/views/jira_connect/users/show.html.haml
index cf88acd6976..29805a2c42d 100644
--- a/app/views/jira_connect/users/show.html.haml
+++ b/app/views/jira_connect/users/show.html.haml
@@ -1,8 +1,14 @@
-.jira-connect-users-container.gl-text-center
- - user_link = link_to(current_user.to_reference, user_path(current_user), target: '_blank', rel: 'noopener noreferrer')
- %h2= _('You are signed in to GitLab as %{user_link}').html_safe % { user_link: user_link }
+.gl-text-center.gl-mx-auto.gl-pt-6
+ %h3.gl-mb-4
+ = _('You are signed in to GitLab as:')
- %p= s_('Integrations|You can now close this window and return to the GitLab for Jira application.')
+ .gl-display-flex.gl-flex-direction-column.gl-align-items-center.gl-mb-4
+ = link_to user_path(current_user), target: '_blank', rel: 'noopener noreferrer' do
+ = user_avatar_without_link(user: current_user, size: 60, css_class: 'gl-mr-0! gl-mb-2', has_tooltip: false)
+ = link_to current_user.to_reference, user_path(current_user), target: '_blank', rel: 'noopener noreferrer'
+
+ %p.gl-mb-6
+ = s_('JiraService|You can now close this window and%{br}return to the GitLab for Jira application.').html_safe % { br: '<br>'.html_safe }
- if @jira_app_link
%p= external_link s_('Integrations|Return to GitLab for Jira'), @jira_app_link, class: 'gl-button btn btn-confirm'
diff --git a/app/views/projects/commit/_commit_box.html.haml b/app/views/projects/commit/_commit_box.html.haml
index 5e15500c556..23572d1d6ac 100644
--- a/app/views/projects/commit/_commit_box.html.haml
+++ b/app/views/projects/commit/_commit_box.html.haml
@@ -47,9 +47,7 @@
- if can?(current_user, :read_pipeline, @last_pipeline)
.well-segment.pipeline-info
- .status-icon-container
- = link_to project_pipeline_path(@project, @last_pipeline.id), class: "ci-status-icon-#{@last_pipeline.status}" do
- = ci_icon_for_status(@last_pipeline.status)
+ .js-commit-pipeline-status{ data: { full_path: @project.full_path, iid: @last_pipeline.iid, graphql_resource_etag: graphql_etag_pipeline_path(@last_pipeline) } }
#{ _('Pipeline') }
= link_to "##{@last_pipeline.id}", project_pipeline_path(@project, @last_pipeline.id)
= ci_label_for_status(@last_pipeline.status)
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 74d926e182d..bfb70e0d496 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -264,6 +264,15 @@
:weight: 1
:idempotent: true
:tags: []
+- :name: cronjob:ci_update_locked_unknown_artifacts
+ :worker_name: Ci::UpdateLockedUnknownArtifactsWorker
+ :feature_category: :build_artifacts
+ :has_external_dependencies:
+ :urgency: :throttled
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent:
+ :tags: []
- :name: cronjob:clusters_integrations_check_prometheus_health
:worker_name: Clusters::Integrations::CheckPrometheusHealthWorker
:feature_category: :incident_management
diff --git a/app/workers/ci/update_locked_unknown_artifacts_worker.rb b/app/workers/ci/update_locked_unknown_artifacts_worker.rb
new file mode 100644
index 00000000000..2d37ebb3c93
--- /dev/null
+++ b/app/workers/ci/update_locked_unknown_artifacts_worker.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Ci
+ class UpdateLockedUnknownArtifactsWorker # rubocop:disable Scalability/IdempotentWorker
+ include ApplicationWorker
+
+ data_consistency :sticky
+ urgency :throttled
+
+ # rubocop:disable Scalability/CronWorkerContext
+ # This worker does not perform work scoped to a context
+ include CronjobQueue
+ # rubocop:enable Scalability/CronWorkerContext
+
+ feature_category :build_artifacts
+
+ def perform
+ return unless ::Feature.enabled?(:ci_job_artifacts_backlog_work)
+
+ artifact_counts = Ci::JobArtifacts::UpdateUnknownLockedStatusService.new.execute
+
+ log_extra_metadata_on_done(:removed_count, artifact_counts[:removed])
+ log_extra_metadata_on_done(:locked_count, artifact_counts[:locked])
+ end
+ end
+end
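The worker is a no-op unless the `ci_job_artifacts_backlog_work` flag (defined later in this diff) is enabled, and it is scheduled by the cron entry added to `config/initializers/1_settings.rb`. A hedged sketch of exercising it manually from a Rails console:

```ruby
# Enable the gating flag, optionally opt into the larger loop limit, then run one pass inline.
Feature.enable(:ci_job_artifacts_backlog_work)
Feature.enable(:ci_job_artifacts_backlog_large_loop_limit) # switches the service to LARGE_LOOP_LIMIT

Ci::UpdateLockedUnknownArtifactsWorker.new.perform
```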
diff --git a/config/feature_flags/development/change_response_code_merge_status.yml b/config/feature_flags/development/change_response_code_merge_status.yml
new file mode 100644
index 00000000000..7fa8260fe78
--- /dev/null
+++ b/config/feature_flags/development/change_response_code_merge_status.yml
@@ -0,0 +1,8 @@
+---
+name: change_response_code_merge_status
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82465/
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/356930
+milestone: '14.10'
+type: development
+group: group::code review
+default_enabled: false
diff --git a/config/feature_flags/development/ci_job_artifacts_backlog_large_loop_limit.yml b/config/feature_flags/development/ci_job_artifacts_backlog_large_loop_limit.yml
new file mode 100644
index 00000000000..1415d9e0db7
--- /dev/null
+++ b/config/feature_flags/development/ci_job_artifacts_backlog_large_loop_limit.yml
@@ -0,0 +1,8 @@
+---
+name: ci_job_artifacts_backlog_large_loop_limit
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/76509
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/347151
+milestone: '14.10'
+type: development
+group: group::pipeline execution
+default_enabled: false
diff --git a/config/feature_flags/development/ci_job_artifacts_backlog_work.yml b/config/feature_flags/development/ci_job_artifacts_backlog_work.yml
new file mode 100644
index 00000000000..a97982d2043
--- /dev/null
+++ b/config/feature_flags/development/ci_job_artifacts_backlog_work.yml
@@ -0,0 +1,8 @@
+---
+name: ci_job_artifacts_backlog_work
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/76509
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/347144
+milestone: '14.10'
+type: development
+group: group::pipeline execution
+default_enabled: false
diff --git a/config/feature_flags/development/distribute_github_parallel_import.yml b/config/feature_flags/development/distribute_github_parallel_import.yml
new file mode 100644
index 00000000000..e7f9ac78afb
--- /dev/null
+++ b/config/feature_flags/development/distribute_github_parallel_import.yml
@@ -0,0 +1,8 @@
+---
+name: distribute_github_parallel_import
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83616
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/356800
+milestone: '14.10'
+type: development
+group: group::source code
+default_enabled: false
diff --git a/config/feature_flags/development/users_expanding_widgets_usage_data.yml b/config/feature_flags/development/users_expanding_widgets_usage_data.yml
deleted file mode 100644
index 3b68aff5d71..00000000000
--- a/config/feature_flags/development/users_expanding_widgets_usage_data.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: users_expanding_widgets_usage_data
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/57133
-rollout_issue_url:
-milestone: '13.11'
-type: development
-group: group::code review
-default_enabled: true
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 20983134889..7b0a23bed87 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -446,6 +446,9 @@ Settings.cron_jobs['pipeline_schedule_worker']['job_class'] = 'PipelineScheduleW
Settings.cron_jobs['expire_build_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['expire_build_artifacts_worker']['cron'] ||= '*/7 * * * *'
Settings.cron_jobs['expire_build_artifacts_worker']['job_class'] = 'ExpireBuildArtifactsWorker'
+Settings.cron_jobs['update_locked_unknown_artifacts_worker'] ||= Settingslogic.new({})
+Settings.cron_jobs['update_locked_unknown_artifacts_worker']['cron'] ||= '*/7 * * * *'
+Settings.cron_jobs['update_locked_unknown_artifacts_worker']['job_class'] = 'Ci::UpdateLockedUnknownArtifactsWorker'
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker']['cron'] ||= '*/23 * * * *'
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker']['job_class'] = 'Ci::PipelineArtifacts::ExpireArtifactsWorker'
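A quick, hedged way to confirm the new cron entry is registered after this change, from a Rails console (values taken from the lines above):

```ruby
# Inspect the schedule wired up for the new worker.
job = Settings.cron_jobs['update_locked_unknown_artifacts_worker']

job['cron']      # => "*/7 * * * *"
job['job_class'] # => "Ci::UpdateLockedUnknownArtifactsWorker"
```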
diff --git a/config/metrics/counts_28d/20210216175055_merge_requests.yml b/config/metrics/counts_28d/20210216175055_merge_requests.yml
index 06c46964954..c57b9e281cf 100644
--- a/config/metrics/counts_28d/20210216175055_merge_requests.yml
+++ b/config/metrics/counts_28d/20210216175055_merge_requests.yml
@@ -7,7 +7,8 @@ product_stage: create
product_group: group::code review
product_category: code_review
value_type: number
-status: active
+status: removed
+milestone_removed: '14.10'
time_frame: 28d
data_source: database
distribution:
diff --git a/config/metrics/counts_all/20210216175045_merge_requests.yml b/config/metrics/counts_all/20210216175045_merge_requests.yml
index 179a6258a15..2871c70abaa 100644
--- a/config/metrics/counts_all/20210216175045_merge_requests.yml
+++ b/config/metrics/counts_all/20210216175045_merge_requests.yml
@@ -7,7 +7,8 @@ product_stage: create
product_group: group::code review
product_category: code_review
value_type: number
-status: active
+status: removed
+milestone_removed: '14.10'
time_frame: all
data_source: database
distribution:
diff --git a/doc/api/merge_requests.md b/doc/api/merge_requests.md
index 0c065c0f2f5..f7e6e2913f6 100644
--- a/doc/api/merge_requests.md
+++ b/doc/api/merge_requests.md
@@ -1629,7 +1629,7 @@ This API returns specific HTTP status codes on failure:
|:------------|--------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------|
| `401` | `Unauthorized` | This user does not have permission to accept this merge request. |
| `405` | `Method Not Allowed` | The merge request cannot be accepted because it is `Draft`, `Closed`, `Pipeline Pending Completion`, or `Failed`. `Success` is required. |
-| `406` | `Branch cannot be merged` | The branch has conflicts and cannot be merged. |
+| `406` | `Branch cannot be merged` | The merge request cannot be merged. |
| `409` | `SHA does not match HEAD of source branch` | The provided `sha` parameter does not match the HEAD of the source. |
For additional important notes on response data, read [Single merge request response notes](#single-merge-request-response-notes).
diff --git a/doc/ci/jobs/index.md b/doc/ci/jobs/index.md
index 4fa251eb3cf..23e62da083c 100644
--- a/doc/ci/jobs/index.md
+++ b/doc/ci/jobs/index.md
@@ -43,6 +43,18 @@ Clicking an individual job shows you its job log, and allows you to:
- Retry the job.
- Erase the job log.
+### View all jobs in a project
+
+> - An improved view was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/293862) in GitLab 14.10, [with a flag](../../administration/feature_flags.md) named `jobs_table_vue`. Disabled by default.
+> - The job status filter was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82539) in GitLab 14.10, [with a flag](../../administration/feature_flags.md) named `jobs_table_vue_search`. Disabled by default.
+
+To view the full list of jobs that ran in a project:
+
+1. On the top bar, select **Menu > Projects** and find the project.
+1. On the left sidebar, select **CI/CD > Jobs**.
+
+You can filter the list by [job status](#the-order-of-jobs-in-a-pipeline).
+
## See why a job failed
When a pipeline fails or is allowed to fail, there are several places where you
@@ -63,9 +75,9 @@ You can also see the reason it failed on the Job detail page.
The order of jobs in a pipeline depends on the type of pipeline graph.
- For [full pipeline graphs](../pipelines/index.md#view-full-pipeline-graph), jobs are sorted by name.
-- For [pipeline mini graphs](../pipelines/index.md#pipeline-mini-graphs), jobs are sorted by severity and then by name.
+- For [pipeline mini graphs](../pipelines/index.md#pipeline-mini-graphs), jobs are sorted by status, and then by name.
-The order of severity is:
+The job status order is:
- failed
- warning
diff --git a/doc/development/database/loose_foreign_keys.md b/doc/development/database/loose_foreign_keys.md
index 17a825b4812..2d3812ec584 100644
--- a/doc/development/database/loose_foreign_keys.md
+++ b/doc/development/database/loose_foreign_keys.md
@@ -95,27 +95,27 @@ Created database 'gitlabhq_test_ee'
Created database 'gitlabhq_geo_test_ee'
Showing cross-schema foreign keys (20):
- ID | HAS_LFK | FROM | TO | COLUMN | ON_DELETE
- 0 | N | ci_builds | projects | project_id | cascade
- 1 | N | ci_job_artifacts | projects | project_id | cascade
- 2 | N | ci_pipelines | projects | project_id | cascade
- 3 | Y | ci_pipelines | merge_requests | merge_request_id | cascade
- 4 | N | external_pull_requests | projects | project_id | cascade
- 5 | N | ci_sources_pipelines | projects | project_id | cascade
- 6 | N | ci_stages | projects | project_id | cascade
- 7 | N | ci_pipeline_schedules | projects | project_id | cascade
- 8 | N | ci_runner_projects | projects | project_id | cascade
- 9 | Y | dast_site_profiles_pipelines | ci_pipelines | ci_pipeline_id | cascade
- 10 | Y | vulnerability_feedback | ci_pipelines | pipeline_id | nullify
- 11 | N | ci_variables | projects | project_id | cascade
- 12 | N | ci_refs | projects | project_id | cascade
- 13 | N | ci_builds_metadata | projects | project_id | cascade
- 14 | N | ci_subscriptions_projects | projects | downstream_project_id | cascade
- 15 | N | ci_subscriptions_projects | projects | upstream_project_id | cascade
- 16 | N | ci_sources_projects | projects | source_project_id | cascade
- 17 | N | ci_job_token_project_scope_links | projects | source_project_id | cascade
- 18 | N | ci_job_token_project_scope_links | projects | target_project_id | cascade
- 19 | N | ci_project_monthly_usages | projects | project_id | cascade
+ ID | HAS_LFK | FROM | TO | COLUMN | ON_DELETE
+ 0 | N | ci_builds | projects | project_id | cascade
+ 1 | N | ci_job_artifacts | projects | project_id | cascade
+ 2 | N | ci_pipelines | projects | project_id | cascade
+ 3 | Y | ci_pipelines | merge_requests | merge_request_id | cascade
+ 4 | N | external_pull_requests | projects | project_id | cascade
+ 5 | N | ci_sources_pipelines | projects | project_id | cascade
+ 6 | N | ci_stages | projects | project_id | cascade
+ 7 | N | ci_pipeline_schedules | projects | project_id | cascade
+ 8 | N | ci_runner_projects | projects | project_id | cascade
+ 9 | Y | dast_site_profiles_pipelines | ci_pipelines | ci_pipeline_id | cascade
+ 10 | Y | vulnerability_feedback | ci_pipelines | pipeline_id | nullify
+ 11 | N | ci_variables | projects | project_id | cascade
+ 12 | N | ci_refs | projects | project_id | cascade
+ 13 | N | ci_builds_metadata | projects | project_id | cascade
+ 14 | N | ci_subscriptions_projects | projects | downstream_project_id | cascade
+ 15 | N | ci_subscriptions_projects | projects | upstream_project_id | cascade
+ 16 | N | ci_sources_projects | projects | source_project_id | cascade
+ 17 | N | ci_job_token_project_scope_links | projects | source_project_id | cascade
+ 18 | N | ci_job_token_project_scope_links | projects | target_project_id | cascade
+ 19 | N | ci_project_monthly_usages | projects | project_id | cascade
To match FK write one or many filters to match against FROM/TO/COLUMN:
- scripts/decomposition/generate-loose-foreign-key <filter(s)...>
@@ -480,3 +480,380 @@ it executes `occurrence.pipeline.created_at`.
When looping through the vulnerability occurrences in the Sidekiq worker, we
could try to load the corresponding pipeline and choose to skip processing that
occurrence if pipeline is not found.
+
+## Architecture
+
+The loose foreign keys feature is implemented within the `LooseForeignKeys` Ruby namespace. The
+code is isolated from the core application code and theoretically, it could be a standalone library.
+
+The feature is invoked solely in the [`LooseForeignKeys::CleanupWorker`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/workers/loose_foreign_keys/cleanup_worker.rb) worker class. The worker is scheduled via a
+cron job where the schedule depends on the configuration of the GitLab instance.
+
+- Non-decomposed GitLab (1 database): invoked every minute.
+- Decomposed GitLab (2 databases, CI and Main): invoked every minute, cleaning up one database
+at a time. For example, the cleanup worker for the main database runs every two minutes.
+
+To avoid lock contention and the processing of the same database rows, the worker does not run
+in parallel. This behavior is ensured with a Redis lock.
+
+**Record cleanup procedure:**
+
+1. Acquire the Redis lock.
+1. Determine which database to clean up.
+1. Collect all database tables where the deletions are tracked (parent tables).
+ - This is achieved by reading the `config/gitlab_loose_foreign_keys.yml` file.
+ - A table is considered "tracked" when a loose foreign key definition exists for the table and
+ the `DELETE` trigger is installed.
+1. Cycle through the tables with an infinite loop.
+1. For each table, load a batch of deleted parent records to clean up.
+1. Depending on the YAML configuration, build `DELETE` or `UPDATE` (nullify) queries for the
+referenced child tables.
+1. Invoke the queries.
+1. Repeat until all child records are cleaned up or the maximum limit is reached.
+1. Remove the deleted parent records when all child records are cleaned up.
+
+### Database structure
+
+The feature relies on triggers installed on the parent tables. When a parent record is deleted,
+the trigger will automatically insert a new record into the `loose_foreign_keys_deleted_records`
+database table.
+
+The inserted record will store the following information about the deleted record:
+
+- `fully_qualified_table_name`: name of the database table where the record was located.
+- `primary_key_value`: the ID of the record; the value will be present in the child tables as
+the foreign key value. At the moment, composite primary keys are not supported; the parent table
+must have an `id` column.
+- `status`: defaults to pending, represents the status of the cleanup process.
+- `consume_after`: defaults to the current time.
+- `cleanup_attempts`: defaults to 0. The number of times the worker tried to clean up this record.
+A non-zero number would mean that this record has many child records and cleaning it up requires
+several runs.
+
+#### Database decomposition
+
+The `loose_foreign_keys_deleted_records` table will exist on both database servers (Ci and Main)
+after the [database decomposition](https://gitlab.com/groups/gitlab-org/-/epics/6168). The worker
+will determine which parent tables belong to which database by reading the
+`lib/gitlab/database/gitlab_schemas.yml` YAML file.
+
+Example:
+
+- Main database tables
+ - `projects`
+ - `namespaces`
+ - `merge_requests`
+- Ci database tables
+ - `ci_builds`
+ - `ci_pipelines`
+
+When the worker is invoked for the Ci database, the worker will load deleted records only from the
+`ci_builds` and `ci_pipelines` tables. During the cleanup process, `DELETE` and `UPDATE` queries
+will mostly run on tables located in the Main database. In this example, one `UPDATE` query will
+nullify the `merge_requests.head_pipeline_id` column.
+
+#### Database partitioning
+
+Due to the large volume of inserts the database table receives daily, a special partitioning
+strategy was implemented to address data bloat concerns. Originally, the
+[time-decay](https://about.gitlab.com/company/team/structure/working-groups/database-scalability/time-decay.html)
+strategy was considered for the feature but due to the large data volume we decided to implement a
+new strategy.
+
+A deleted record is considered fully processed when all its direct child records have been
+cleaned up. When this happens, the loose foreign key worker will update the `status` column of
+the deleted record. After this step, the record is no longer needed.
+
+The sliding partitioning strategy provides an efficient way of cleaning up old, unused data by
+adding a new database partition and removing the old one when certain conditions are met.
+The `loose_foreign_keys_deleted_records` database table is list partitioned; most of the
+time there is only one partition attached to the table.
+
+```sql
+ Partitioned table "public.loose_foreign_keys_deleted_records"
+ Column | Type | Collation | Nullable | Default | Storage | Stats target | Description
+----------------------------+--------------------------+-----------+----------+----------------------------------------------------------------+----------+--------------+-------------
+ id | bigint | | not null | nextval('loose_foreign_keys_deleted_records_id_seq'::regclass) | plain | |
+ partition | bigint | | not null | 84 | plain | |
+ primary_key_value | bigint | | not null | | plain | |
+ status | smallint | | not null | 1 | plain | |
+ created_at | timestamp with time zone | | not null | now() | plain | |
+ fully_qualified_table_name | text | | not null | | extended | |
+ consume_after | timestamp with time zone | | | now() | plain | |
+ cleanup_attempts | smallint | | | 0 | plain | |
+Partition key: LIST (partition)
+Indexes:
+ "loose_foreign_keys_deleted_records_pkey" PRIMARY KEY, btree (partition, id)
+ "index_loose_foreign_keys_deleted_records_for_partitioned_query" btree (partition, fully_qualified_table_name, consume_after, id) WHERE status = 1
+Check constraints:
+ "check_1a541f3235" CHECK (char_length(fully_qualified_table_name) <= 150)
+Partitions: gitlab_partitions_dynamic.loose_foreign_keys_deleted_records_84 FOR VALUES IN ('84')
+```
+
+The `partition` column controls the insert direction: the `partition` value determines which
+partition will get the deleted rows inserted via the trigger. Notice that the default value of
+the `partition` column matches the value of the list partition (84). In the `INSERT` query
+within the trigger, the value of `partition` is omitted; the trigger always relies on the
+default value of the column.
+
+Example `INSERT` query for the trigger:
+
+```sql
+INSERT INTO loose_foreign_keys_deleted_records
+(fully_qualified_table_name, primary_key_value)
+SELECT TG_TABLE_SCHEMA || '.' || TG_TABLE_NAME, old_table.id FROM old_table;
+```
+
+The partition "sliding" process is controlled by two, regularly executed callbacks. These
+callbackes are defined within the `LooseForeignKeys::DeletedRecord` model.
+
+The `next_partition_if` callback controls when to create a new partition. A new partition will
+be created when the current partition has at least one record older than 24 hours. A new partition
+is added by the [`PartitionManager`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/database/partitioning/partition_manager.rb)
+using the following steps:
+
+1. Create a new partition, where the `VALUE` for the partition is `CURRENT_PARTITION + 1`.
+1. Update the default value of the `partition` column to `CURRENT_PARTITION + 1`.
+
+With these steps, new `INSERT` statements via the triggers will end up in the new partition. At this point,
+the database table has two partitions.
+
+The `detach_partition_if` callback determines if the old partitions can be detached from the table.
+A partition is detachable if there are no pending (unprocessed) records in the partition
+(`status = 1`). The detached partitions will be available for some time; you can see the list of
+detached partitions in the `detached_partitions` table:
+
+```sql
+select * from detached_partitions;
+```
+
+#### Cleanup queries
+
+The `LooseForeignKeys::CleanupWorker` has its own database query builder, which depends on `Arel`.
+The feature doesn't reference any application-specific `ActiveRecord` models to avoid unexpected
+side effects. The database queries are batched, which means that several parent records are being
+cleaned up at the same time.
+
+Example `DELETE` query:
+
+```sql
+DELETE
+FROM "merge_request_metrics"
+WHERE ("merge_request_metrics"."id") IN
+ (SELECT "merge_request_metrics"."id"
+ FROM "merge_request_metrics"
+ WHERE "merge_request_metrics"."pipeline_id" IN (1, 2, 10, 20)
+ LIMIT 1000 FOR UPDATE SKIP LOCKED)
+```
+
+The primary key values of the parent records are 1, 2, 10, and 20.
+
+Example `UPDATE` (nullify) query:
+
+```sql
+UPDATE "merge_requests"
+SET "head_pipeline_id" = NULL
+WHERE ("merge_requests"."id") IN
+ (SELECT "merge_requests"."id"
+ FROM "merge_requests"
+ WHERE "merge_requests"."head_pipeline_id" IN (3, 4, 30, 40)
+ LIMIT 500 FOR UPDATE SKIP LOCKED)
+```
+
+These queries are batched, which means that in many cases, several invocations are needed to clean
+up all associated child records.
+
+The batching is implemented with loops; the processing stops when all associated child records
+are cleaned up or the limit is reached.
+
+```ruby
+loop do
+ modification_count = process_batch_with_skip_locked
+
+ break if modification_count == 0 || over_limit?
+end
+
+loop do
+ modification_count = process_batch
+
+ break if modification_count == 0 || over_limit?
+end
+```
+
+The loop-based batch processing is preferred over `EachBatch` for the following reasons:
+
+- The records in the batch are modified, so the next batch will contain different records.
+- There is always an index on the foreign key column; however, the column is usually not unique.
+`EachBatch` requires a unique column for the iteration.
+- The record order doesn't matter for the cleanup.
+
+Notice that we have two loops. The initial loop will process records with the `SKIP LOCKED` clause.
+The query will skip rows that are locked by other application processes. This will ensure that the
+cleanup worker is less likely to become blocked. The second loop will execute the database
+queries without `SKIP LOCKED` to ensure that all records have been processed.
+
+#### Processing limits
+
+A constant, large volume of record updates or deletions can cause incidents and affect the
+availability of GitLab:
+
+- Increased table bloat.
+- Increased number of pending WAL files.
+- Busy tables, difficulty when acquiring locks.
+
+To mitigate these issues, several limits are applied when the worker runs.
+
+- Each query has a `LIMIT` clause; a query cannot process an unbounded number of rows.
+- The maximum number of record deletions and record updates is limited.
+- The runtime of the database queries is limited to a maximum of 30 seconds.
+
+The limit rules are implemented in the `LooseForeignKeys::ModificationTracker` class. When one of
+the limits (record modification count, time limit) is reached, the processing is stopped
+immediately. After some time, the next scheduled worker will continue the cleanup process.
+
+#### Performance characteristics
+
+The database trigger on the parent tables will **decrease** the record deletion speed. Each
+statement that removes rows from the parent table will invoke the trigger to insert records
+into the `loose_foreign_keys_deleted_records` table.
+
+The queries within the cleanup worker are fairly efficient index scans; with the limits in place,
+they're unlikely to affect other parts of the application.
+
+The database queries do not run in a transaction; when an error happens, for example a statement
+timeout or a worker crash, the next job will continue the processing.
+
+## Troubleshooting
+
+### Accumulation of deleted records
+
+There can be cases where the workers need to process an unusually large amount of data. This can
+happen under normal usage, for example when a large project or group is deleted. In this scenario,
+there can be several million rows to be deleted or nullified. Due to the limits enforced by the
+worker, processing this data will take some time.
+
+When cleaning up "heavy-hitters", the feature ensures fair processing by rescheduling larger
+batches for later. This gives time for other deleted records to be processed.
+
+For example, a project with millions of `ci_builds` records is deleted. The `ci_builds` records
+will be deleted by the loose foreign keys feature.
+
+1. The cleanup worker is scheduled and picks up a batch of deleted `projects` records. The large
+project is part of the batch.
+1. Deletion of the orphaned `ci_builds` rows has started.
+1. The time limit is reached, but the cleanup is not complete.
+1. The `cleanup_attempts` column is incremented for the deleted records.
+1. Go to step 1. The next cleanup worker continues the cleanup.
+1. When the `cleanup_attempts` reaches 3, the batch is re-scheduled 10 minutes later by updating
+the `consume_after` column.
+1. The next cleanup worker will process a different batch.
+
+We have Prometheus metrics in place to monitor the deleted record cleanup:
+
+- `loose_foreign_key_processed_deleted_records`: Number of processed deleted records. When large
+cleanup happens, this number would decrease.
+- `loose_foreign_key_incremented_deleted_records`: Number of deleted records which were not
+finished processing. The `cleanup_attempts` column was incremented.
+- `loose_foreign_key_rescheduled_deleted_records`: Number of deleted records that had to be
+rescheduled at a later time after 3 cleanup attempts.
+
+Example Thanos query:
+
+```plaintext
+loose_foreign_key_rescheduled_deleted_records{env="gprd", table="ci_runners"}
+```
+
+Another way to look at the situation is by running a database query. This query gives the exact
+counts of the unprocessed records:
+
+```sql
+SELECT partition, fully_qualified_table_name, count(*)
+FROM loose_foreign_keys_deleted_records
+WHERE
+status = 1
+GROUP BY 1, 2;
+```
+
+Example output:
+
+```sql
+ partition | fully_qualified_table_name | count
+-----------+----------------------------+-------
+ 87 | public.ci_builds | 874
+ 87 | public.ci_job_artifacts | 6658
+ 87 | public.ci_pipelines | 102
+ 87 | public.ci_runners | 111
+ 87 | public.merge_requests | 255
+ 87 | public.namespaces | 25
+ 87 | public.projects | 6
+```
+
+The query includes the partition number, which can be useful for detecting whether the cleanup
+process is significantly lagging behind. When multiple different partition values are present in
+the list, it means that the cleanup of some deleted records didn't finish within several days
+(one new partition is added every day).
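+
+If you prefer the Rails console, a rough equivalent of the SQL check above is sketched below; the
+`LooseForeignKeys::DeletedRecord` model name is an assumption used for illustration:
+
+```ruby
+# List the partitions that still hold pending (status = 1) deleted records.
+LooseForeignKeys::DeletedRecord.where(status: 1).distinct.pluck(:partition)
+```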
+
+Steps to diagnose the problem:
+
+- Check which records are accumulating.
+- Try to get an estimate of the number of remaining records.
+- Look into the worker performance stats (Kibana or Thanos).
+
+Possible solutions:
+
+- Short-term: increase the batch sizes.
+- Long-term: invoke the worker more frequently, or parallelize the worker.
+
+For a one-time fix, we can run the cleanup worker several times from the Rails console. The worker
+can be run in parallel; however, this can introduce lock contention and could increase the overall
+worker runtime.
+
+```ruby
+LooseForeignKeys::CleanupWorker.new.perform
+```
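+
+To process more batches in one sitting, the same call can be repeated; a small example, with the
+iteration count chosen arbitrarily:
+
+```ruby
+# Run several cleanup iterations back to back from the Rails console.
+# Each call respects the worker's own batch and time limits, so repeating it
+# only processes additional batches rather than bypassing the limits.
+5.times { LooseForeignKeys::CleanupWorker.new.perform }
+```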
+
+When the cleanup is done, the older partitions will be automatically detached by the
+`PartitionManager`.
+
+### PartitionManager bug
+
+NOTE:
+This issue happened in the past on Staging and it has been mitigated.
+
+When adding a new partition, the default value of the `partition` column is also updated. This is
+a schema change that is executed in the same transaction as the new partition creation. It's highly
+unlikely that the default value of the `partition` column becomes outdated.
+
+However, if this happens, it can cause application-wide incidents because the `partition`
+value points to a partition that doesn't exist. The symptom: deleting records from tables where
+the `DELETE` trigger is installed fails.
+
+```sql
+\d+ loose_foreign_keys_deleted_records;
+
+ Column | Type | Collation | Nullable | Default | Storage | Stats target | Description
+----------------------------+--------------------------+-----------+----------+----------------------------------------------------------------+----------+--------------+-------------
+ id | bigint | | not null | nextval('loose_foreign_keys_deleted_records_id_seq'::regclass) | plain | |
+ partition | bigint | | not null | 4 | plain | |
+ primary_key_value | bigint | | not null | | plain | |
+ status | smallint | | not null | 1 | plain | |
+ created_at | timestamp with time zone | | not null | now() | plain | |
+ fully_qualified_table_name | text | | not null | | extended | |
+ consume_after | timestamp with time zone | | | now() | plain | |
+ cleanup_attempts | smallint | | | 0 | plain | |
+Partition key: LIST (partition)
+Indexes:
+ "loose_foreign_keys_deleted_records_pkey" PRIMARY KEY, btree (partition, id)
+ "index_loose_foreign_keys_deleted_records_for_partitioned_query" btree (partition, fully_qualified_table_name, consume_after, id) WHERE status = 1
+Check constraints:
+ "check_1a541f3235" CHECK (char_length(fully_qualified_table_name) <= 150)
+Partitions: gitlab_partitions_dynamic.loose_foreign_keys_deleted_records_3 FOR VALUES IN ('3')
+```
+
+Check the default value of the `partition` column and compare it with the available partitions
+(4 vs 3). The partition with the value of 4 does not exist. To mitigate the problem, an emergency
+schema change is required:
+
+```sql
+ALTER TABLE loose_foreign_keys_deleted_records ALTER COLUMN partition SET DEFAULT 3;
+```
diff --git a/lib/api/merge_requests.rb b/lib/api/merge_requests.rb
index de9a2a198d9..202f8ac838f 100644
--- a/lib/api/merge_requests.rb
+++ b/lib/api/merge_requests.rb
@@ -458,7 +458,11 @@ module API
not_allowed! if !immediately_mergeable && !automatically_mergeable
- render_api_error!('Branch cannot be merged', 406) unless merge_request.mergeable?(skip_ci_check: automatically_mergeable)
+ if Feature.enabled?(:change_response_code_merge_status, user_project, default_enabled: :yaml)
+ render_api_error!('Branch cannot be merged', 422) unless merge_request.mergeable?(skip_ci_check: automatically_mergeable)
+ else
+ render_api_error!('Branch cannot be merged', 406) unless merge_request.mergeable?(skip_ci_check: automatically_mergeable)
+ end
check_sha_param!(params, merge_request)
diff --git a/lib/gitlab/github_import/parallel_scheduling.rb b/lib/gitlab/github_import/parallel_scheduling.rb
index 586bfc8cf98..97de2a49e72 100644
--- a/lib/gitlab/github_import/parallel_scheduling.rb
+++ b/lib/gitlab/github_import/parallel_scheduling.rb
@@ -209,7 +209,11 @@ module Gitlab
# Default batch settings for parallel import (can be redefined in Importer classes)
# Example: { size: 100, delay: 1.minute }
def parallel_import_batch
- {}
+ if Feature.enabled?(:distribute_github_parallel_import, default_enabled: :yaml)
+ { size: 1000, delay: 1.minute }
+ else
+ {}
+ end
end
def abort_on_failure
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index a3afbea89c8..b465d4bcc9b 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -507,7 +507,6 @@ module Gitlab
{
deploy_keys: distinct_count(::DeployKey.where(time_period), :user_id),
keys: distinct_count(::Key.regular_keys.where(time_period), :user_id),
- merge_requests: distinct_count(::MergeRequest.where(time_period), :author_id),
projects_with_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: true))),
projects_without_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: [false, nil]))),
remote_mirrors: distinct_count(::Project.with_remote_mirrors.where(time_period), :creator_id),
diff --git a/lib/gitlab/usage_data_counters/known_events/common.yml b/lib/gitlab/usage_data_counters/known_events/common.yml
index 685362bea5a..0d89a5181ec 100644
--- a/lib/gitlab/usage_data_counters/known_events/common.yml
+++ b/lib/gitlab/usage_data_counters/known_events/common.yml
@@ -339,22 +339,18 @@
redis_slot: secure
category: secure
aggregation: weekly
- feature_flag: users_expanding_widgets_usage_data
- name: users_expanding_testing_code_quality_report
redis_slot: testing
category: testing
aggregation: weekly
- feature_flag: users_expanding_widgets_usage_data
- name: users_expanding_testing_accessibility_report
redis_slot: testing
category: testing
aggregation: weekly
- feature_flag: users_expanding_widgets_usage_data
- name: users_expanding_testing_license_compliance_report
redis_slot: testing
category: testing
aggregation: weekly
- feature_flag: users_expanding_widgets_usage_data
- name: users_visiting_testing_license_compliance_full_report
redis_slot: testing
category: testing
diff --git a/lib/system_check/base_check.rb b/lib/system_check/base_check.rb
index c36cacbaf4f..ae3a9412e5c 100644
--- a/lib/system_check/base_check.rb
+++ b/lib/system_check/base_check.rb
@@ -64,20 +64,14 @@ module SystemCheck
call_or_return(@skip_reason) || 'skipped'
end
- # Define a reason why we skipped the SystemCheck (during runtime)
+ # Define or get a reason why we skipped the SystemCheck (during runtime)
#
# This is used when you need dynamic evaluation like when you have
# multiple reasons why a check can fail
#
# @param [String] reason to be displayed
- attr_writer :skip_reason
-
- # Skip reason defined during runtime
- #
- # This value have precedence over the one defined in the subclass
- #
- # @return [String] the reason
- attr_reader :skip_reason
+ # @return [String] reason to be displayed
+ attr_accessor :skip_reason
# Does the check support automatically repair routine?
#
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 474d3c82e30..632169b3289 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -7621,6 +7621,11 @@ msgstr ""
msgid "Clear"
msgstr ""
+msgid "Clear %{count} image from cache?"
+msgid_plural "Clear %{count} images from cache?"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "Clear all repository checks"
msgstr ""
@@ -12155,9 +12160,15 @@ msgstr ""
msgid "Dependency list"
msgstr ""
+msgid "DependencyProxy|All items in the cache are scheduled for removal."
+msgstr ""
+
msgid "DependencyProxy|Cached %{time}"
msgstr ""
+msgid "DependencyProxy|Clear cache"
+msgstr ""
+
msgid "DependencyProxy|Clear the Dependency Proxy cache automatically"
msgstr ""
@@ -20369,9 +20380,6 @@ msgstr ""
msgid "Integrations|You can close this window."
msgstr ""
-msgid "Integrations|You can now close this window and return to the GitLab for Jira application."
-msgstr ""
-
msgid "Integrations|You can use this alias in your Slack commands"
msgstr ""
@@ -21506,6 +21514,9 @@ msgstr ""
msgid "JiraService|Work on Jira issues without leaving GitLab. Add a Jira menu to access a read-only list of your Jira issues. %{jira_issues_link_start}Learn more.%{link_end}"
msgstr ""
+msgid "JiraService|You can now close this window and%{br}return to the GitLab for Jira application."
+msgstr ""
+
msgid "JiraService|You must configure Jira before enabling this integration. %{jira_doc_link_start}Learn more.%{link_end}"
msgstr ""
@@ -37990,6 +38001,9 @@ msgstr ""
msgid "There was a problem fetching the keep latest artifacts setting."
msgstr ""
+msgid "There was a problem fetching the latest pipeline status."
+msgstr ""
+
msgid "There was a problem fetching the pipeline stages."
msgstr ""
@@ -42724,6 +42738,11 @@ msgstr ""
msgid "You are about to add %{usersTag} people to the discussion. They will all receive a notification."
msgstr ""
+msgid "You are about to clear %{count} image from the cache. Once you confirm, the next time a pipeline runs it must pull an image or tag from Docker Hub. Are you sure?"
+msgid_plural "You are about to clear %{count} images from the cache. Once you confirm, the next time a pipeline runs it must pull an image or tag from Docker Hub. Are you sure?"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "You are about to delete this forked project containing:"
msgstr ""
@@ -42820,7 +42839,7 @@ msgstr ""
msgid "You are receiving this message because you are a GitLab administrator for %{url}."
msgstr ""
-msgid "You are signed in to GitLab as %{user_link}"
+msgid "You are signed in to GitLab as:"
msgstr ""
msgid "You are trying to upload something other than an image. Please upload a .png, .jpg, .jpeg, .gif, .bmp, .tiff or .ico."
diff --git a/package.json b/package.json
index a8c796cf926..382511cb69d 100644
--- a/package.json
+++ b/package.json
@@ -256,7 +256,7 @@
"stylelint": "^14.3.0",
"timezone-mock": "^1.0.8",
"vue-jest": "4.0.1",
- "webpack-dev-server": "4.8.0",
+ "webpack-dev-server": "4.8.1",
"xhr-mock": "^2.5.1",
"yarn-check-webpack-plugin": "^1.2.0",
"yarn-deduplicate": "^4.0.0"
diff --git a/spec/features/projects/import_export/import_file_spec.rb b/spec/features/projects/import_export/import_file_spec.rb
index 1e5c5d33ad9..c7fbaa85483 100644
--- a/spec/features/projects/import_export/import_file_spec.rb
+++ b/spec/features/projects/import_export/import_file_spec.rb
@@ -24,9 +24,9 @@ RSpec.describe 'Import/Export - project import integration test', :js do
context 'when selecting the namespace' do
let(:user) { create(:admin) }
let!(:namespace) { user.namespace }
- let(:randomHex) { SecureRandom.hex }
- let(:project_name) { 'Test Project Name' + randomHex }
- let(:project_path) { 'test-project-name' + randomHex }
+ let(:random_hex) { SecureRandom.hex }
+ let(:project_name) { 'Test Project Name' + random_hex }
+ let(:project_path) { 'test-project-name' + random_hex }
it 'user imports an exported project successfully', :sidekiq_might_not_need_inline do
visit new_project_path
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index 0f8daaf8e15..6907701de9c 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Task Lists', :js do
MARKDOWN
end
- let(:singleIncompleteMarkdown) do
+ let(:single_incomplete_markdown) do
<<-MARKDOWN.strip_heredoc
This is a task list:
@@ -30,7 +30,7 @@ RSpec.describe 'Task Lists', :js do
MARKDOWN
end
- let(:singleCompleteMarkdown) do
+ let(:single_complete_markdown) do
<<-MARKDOWN.strip_heredoc
This is a task list:
@@ -94,7 +94,7 @@ RSpec.describe 'Task Lists', :js do
end
describe 'single incomplete task' do
- let!(:issue) { create(:issue, description: singleIncompleteMarkdown, author: user, project: project) }
+ let!(:issue) { create(:issue, description: single_incomplete_markdown, author: user, project: project) }
it 'renders' do
visit_issue(project, issue)
@@ -113,7 +113,7 @@ RSpec.describe 'Task Lists', :js do
end
describe 'single complete task' do
- let!(:issue) { create(:issue, description: singleCompleteMarkdown, author: user, project: project) }
+ let!(:issue) { create(:issue, description: single_complete_markdown, author: user, project: project) }
it 'renders' do
visit_issue(project, issue)
@@ -171,7 +171,7 @@ RSpec.describe 'Task Lists', :js do
describe 'single incomplete task' do
let!(:note) do
- create(:note, note: singleIncompleteMarkdown, noteable: issue,
+ create(:note, note: single_incomplete_markdown, noteable: issue,
project: project, author: user)
end
@@ -186,7 +186,7 @@ RSpec.describe 'Task Lists', :js do
describe 'single complete task' do
let!(:note) do
- create(:note, note: singleCompleteMarkdown, noteable: issue,
+ create(:note, note: single_complete_markdown, noteable: issue,
project: project, author: user)
end
@@ -264,7 +264,7 @@ RSpec.describe 'Task Lists', :js do
end
describe 'single incomplete task' do
- let!(:merge) { create(:merge_request, :simple, description: singleIncompleteMarkdown, author: user, source_project: project) }
+ let!(:merge) { create(:merge_request, :simple, description: single_incomplete_markdown, author: user, source_project: project) }
it 'renders for description' do
visit_merge_request(project, merge)
@@ -283,7 +283,7 @@ RSpec.describe 'Task Lists', :js do
end
describe 'single complete task' do
- let!(:merge) { create(:merge_request, :simple, description: singleCompleteMarkdown, author: user, source_project: project) }
+ let!(:merge) { create(:merge_request, :simple, description: single_complete_markdown, author: user, source_project: project) }
it 'renders for description' do
visit_merge_request(project, merge)
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 91de109441a..0dceb92db94 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1671,6 +1671,18 @@ describe('Api', () => {
});
});
+ describe('dependency proxy cache', () => {
+ it('schedules the cache list for deletion', async () => {
+ const groupId = 1;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/dependency_proxy/cache`;
+
+ mock.onDelete(expectedUrl).reply(httpStatus.ACCEPTED);
+ const { status } = await Api.deleteDependencyProxyCacheList(groupId, {});
+
+ expect(status).toBe(httpStatus.ACCEPTED);
+ });
+ });
+
describe('Feature Flag User List', () => {
let expectedUrl;
let projectId;
diff --git a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
new file mode 100644
index 00000000000..db7b7b45397
--- /dev/null
+++ b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
@@ -0,0 +1,150 @@
+import { GlLoadingIcon, GlLink } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import CommitBoxPipelineStatus from '~/projects/commit_box/info/components/commit_box_pipeline_status.vue';
+import {
+ COMMIT_BOX_POLL_INTERVAL,
+ PIPELINE_STATUS_FETCH_ERROR,
+} from '~/projects/commit_box/info/constants';
+import getLatestPipelineStatusQuery from '~/projects/commit_box/info/graphql/queries/get_latest_pipeline_status.query.graphql';
+import * as graphQlUtils from '~/pipelines/components/graph/utils';
+import { mockPipelineStatusResponse } from '../mock_data';
+
+const mockProvide = {
+ fullPath: 'root/ci-project',
+ iid: '46',
+ graphqlResourceEtag: '/api/graphql:pipelines/id/320',
+};
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+
+describe('Commit box pipeline status', () => {
+ let wrapper;
+
+ const statusSuccessHandler = jest.fn().mockResolvedValue(mockPipelineStatusResponse);
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findStatusIcon = () => wrapper.findComponent(CiIcon);
+ const findPipelineLink = () => wrapper.findComponent(GlLink);
+
+ const advanceToNextFetch = () => {
+ jest.advanceTimersByTime(COMMIT_BOX_POLL_INTERVAL);
+ };
+
+ const createMockApolloProvider = (handler) => {
+ const requestHandlers = [[getLatestPipelineStatusQuery, handler]];
+
+ return createMockApollo(requestHandlers);
+ };
+
+ const createComponent = (handler = statusSuccessHandler) => {
+ wrapper = shallowMount(CommitBoxPipelineStatus, {
+ provide: {
+ ...mockProvide,
+ },
+ apolloProvider: createMockApolloProvider(handler),
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('loading state', () => {
+ it('should display loading state when loading', () => {
+ createComponent();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findStatusIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('loaded state', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('should display pipeline status after the query is resolved successfully', async () => {
+ expect(findStatusIcon().exists()).toBe(true);
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(createFlash).toHaveBeenCalledTimes(0);
+ });
+
+ it('should link to the latest pipeline', () => {
+ const {
+ data: {
+ project: {
+ pipeline: {
+ detailedStatus: { detailsPath },
+ },
+ },
+ },
+ } = mockPipelineStatusResponse;
+
+ expect(findPipelineLink().attributes('href')).toBe(detailsPath);
+ });
+ });
+
+ describe('error state', () => {
+ it('createFlash should show if there is an error fetching the pipeline status', async () => {
+ createComponent(failedHandler);
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: PIPELINE_STATUS_FETCH_ERROR,
+ });
+ });
+ });
+
+ describe('polling', () => {
+ it('polling interval is set for pipeline stages', () => {
+ createComponent();
+
+ const expectedInterval = wrapper.vm.$apollo.queries.pipelineStatus.options.pollInterval;
+
+ expect(expectedInterval).toBe(COMMIT_BOX_POLL_INTERVAL);
+ });
+
+ it('polls for pipeline status', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(statusSuccessHandler).toHaveBeenCalledTimes(1);
+
+ advanceToNextFetch();
+ await waitForPromises();
+
+ expect(statusSuccessHandler).toHaveBeenCalledTimes(2);
+
+ advanceToNextFetch();
+ await waitForPromises();
+
+ expect(statusSuccessHandler).toHaveBeenCalledTimes(3);
+ });
+
+ it('toggles pipelineStatus polling with visibility check', async () => {
+ jest.spyOn(graphQlUtils, 'toggleQueryPollingByVisibility');
+
+ createComponent();
+
+ await waitForPromises();
+
+ expect(graphQlUtils.toggleQueryPollingByVisibility).toHaveBeenCalledWith(
+ wrapper.vm.$apollo.queries.pipelineStatus,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/commit/mock_data.js b/spec/frontend/commit/mock_data.js
index 79631de974d..8db162c07c2 100644
--- a/spec/frontend/commit/mock_data.js
+++ b/spec/frontend/commit/mock_data.js
@@ -141,3 +141,23 @@ export const mockPipelineStagesQueryResponse = {
},
},
};
+
+export const mockPipelineStatusResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/20',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/320',
+ detailedStatus: {
+ id: 'pending-320-320',
+ detailsPath: '/root/ci-project/-/pipelines/320',
+ icon: 'status_pending',
+ group: 'pending',
+ __typename: 'DetailedStatus',
+ },
+ __typename: 'Pipeline',
+ },
+ __typename: 'Project',
+ },
+ },
+};
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
index 79894e25889..4e6870be5ff 100644
--- a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
+++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
@@ -1,19 +1,26 @@
import {
+ GlAlert,
+ GlDropdown,
+ GlDropdownItem,
GlFormInputGroup,
GlFormGroup,
+ GlModal,
GlSkeletonLoader,
GlSprintf,
GlEmptyState,
} from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import MockAdapter from 'axios-mock-adapter';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { stripTypenames } from 'helpers/graphql_helpers';
import waitForPromises from 'helpers/wait_for_promises';
import { GRAPHQL_PAGE_SIZE } from '~/packages_and_registries/dependency_proxy/constants';
+import axios from '~/lib/utils/axios_utils';
import DependencyProxyApp from '~/packages_and_registries/dependency_proxy/app.vue';
+import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ManifestsList from '~/packages_and_registries/dependency_proxy/components/manifests_list.vue';
@@ -21,13 +28,25 @@ import getDependencyProxyDetailsQuery from '~/packages_and_registries/dependency
import { proxyDetailsQuery, proxyData, pagination, proxyManifests } from './mock_data';
+const dummyApiVersion = 'v3000';
+const dummyGrouptId = 1;
+const dummyUrlRoot = '/gitlab';
+const dummyGon = {
+ api_version: dummyApiVersion,
+ relative_url_root: dummyUrlRoot,
+};
+let originalGon;
+const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${dummyGrouptId}/dependency_proxy/cache`;
+
describe('DependencyProxyApp', () => {
let wrapper;
let apolloProvider;
let resolver;
+ let mock;
const provideDefaults = {
groupPath: 'gitlab-org',
+ groupId: dummyGrouptId,
dependencyProxyAvailable: true,
noManifestsIllustration: 'noManifestsIllustration',
};
@@ -43,9 +62,14 @@ describe('DependencyProxyApp', () => {
apolloProvider,
provide,
stubs: {
+ GlAlert,
+ GlDropdown,
+ GlDropdownItem,
GlFormInputGroup,
GlFormGroup,
+ GlModal,
GlSprintf,
+ TitleArea,
},
});
}
@@ -59,13 +83,24 @@ describe('DependencyProxyApp', () => {
const findProxyCountText = () => wrapper.findByTestId('proxy-count');
const findManifestList = () => wrapper.findComponent(ManifestsList);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findClearCacheDropdownList = () => wrapper.findComponent(GlDropdown);
+ const findClearCacheModal = () => wrapper.findComponent(GlModal);
+ const findClearCacheAlert = () => wrapper.findComponent(GlAlert);
beforeEach(() => {
resolver = jest.fn().mockResolvedValue(proxyDetailsQuery());
+
+ originalGon = window.gon;
+ window.gon = { ...dummyGon };
+
+ mock = new MockAdapter(axios);
+ mock.onDelete(expectedUrl).reply(202, {});
});
afterEach(() => {
wrapper.destroy();
+ window.gon = originalGon;
+ mock.restore();
});
describe('when the dependency proxy is not available', () => {
@@ -95,6 +130,12 @@ describe('DependencyProxyApp', () => {
expect(resolver).not.toHaveBeenCalled();
});
+
+ it('hides the clear cache dropdown list', () => {
+ createComponent(createComponentArguments);
+
+ expect(findClearCacheDropdownList().exists()).toBe(false);
+ });
});
describe('when the dependency proxy is available', () => {
@@ -165,6 +206,7 @@ describe('DependencyProxyApp', () => {
}),
);
createComponent();
+
return waitForPromises();
});
@@ -214,6 +256,28 @@ describe('DependencyProxyApp', () => {
fullPath: provideDefaults.groupPath,
});
});
+
+ it('shows the clear cache dropdown list', () => {
+ expect(findClearCacheDropdownList().exists()).toBe(true);
+ });
+
+ it('shows the clear cache confirmation modal', () => {
+ const modal = findClearCacheModal();
+
+ expect(modal.find('.modal-title').text()).toContain('Clear 2 images from cache?');
+ expect(modal.props('actionPrimary').text).toBe('Clear cache');
+ });
+
+ it('submits the clear cache request', async () => {
+ findClearCacheModal().vm.$emit('primary', { preventDefault: jest.fn() });
+
+ await waitForPromises();
+
+ expect(findClearCacheAlert().exists()).toBe(true);
+ expect(findClearCacheAlert().text()).toBe(
+ 'All items in the cache are scheduled for removal.',
+ );
+ });
});
});
});
diff --git a/spec/initializers/mail_encoding_patch_spec.rb b/spec/initializers/mail_encoding_patch_spec.rb
index e19a8737459..12539c9ca52 100644
--- a/spec/initializers/mail_encoding_patch_spec.rb
+++ b/spec/initializers/mail_encoding_patch_spec.rb
@@ -1,8 +1,7 @@
# frozen_string_literal: true
-# rubocop:disable RSpec/VariableDefinition
+# rubocop:disable RSpec/VariableDefinition, RSpec/VariableName
require 'fast_spec_helper'
-
require 'mail'
require_relative '../../config/initializers/mail_encoding_patch'
@@ -206,4 +205,4 @@ RSpec.describe 'Mail quoted-printable transfer encoding patch and Unicode charac
end
end
end
-# rubocop:enable RSpec/VariableDefinition
+# rubocop:enable RSpec/VariableDefinition, RSpec/VariableName
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 1c7b35ed928..6eb92cdeab9 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -98,9 +98,9 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do
.to receive(:each_object_to_import)
.and_yield(github_comment)
- expect(Gitlab::GithubImport::ImportDiffNoteWorker)
- .to receive(:perform_async)
- .with(project.id, an_instance_of(Hash), an_instance_of(String))
+ expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:bulk_perform_in).with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index 2c2b6a2aff0..6b807bdf098 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -91,9 +91,9 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do
.to receive(:each_object_to_import)
.and_yield(github_issue)
- expect(Gitlab::GithubImport::ImportIssueWorker)
- .to receive(:perform_async)
- .with(project.id, an_instance_of(Hash), an_instance_of(String))
+ expect(Gitlab::GithubImport::ImportIssueWorker).to receive(:bulk_perform_in).with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index a2c7d51214a..6dfd4424342 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -118,9 +118,9 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
expect(service).to receive(:execute).and_return([lfs_download_object])
end
- expect(Gitlab::GithubImport::ImportLfsObjectWorker)
- .to receive(:perform_async)
- .with(project.id, an_instance_of(Hash), an_instance_of(String))
+ expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:bulk_perform_in).with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index 3782dab5ee3..3b4fe652da8 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -84,9 +84,9 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do
.to receive(:each_object_to_import)
.and_yield(github_comment)
- expect(Gitlab::GithubImport::ImportNoteWorker)
- .to receive(:perform_async)
- .with(project.id, an_instance_of(Hash), an_instance_of(String))
+ expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:bulk_perform_in).with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 9f7c8d691f7..200898f8f03 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -22,10 +22,6 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
def collection_method
:issues
end
-
- def parallel_import_batch
- { size: 10, delay: 1.minute }
- end
end
end
@@ -261,7 +257,7 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
let(:repr_class) { double(:representation) }
let(:worker_class) { double(:worker) }
let(:object) { double(:object) }
- let(:batch_size) { 200 }
+ let(:batch_size) { 1000 }
let(:batch_delay) { 1.minute }
before do
@@ -281,7 +277,6 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
context 'with multiple objects' do
before do
- allow(importer).to receive(:parallel_import_batch) { { size: batch_size, delay: batch_delay } }
expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
end
@@ -295,6 +290,25 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
importer.parallel_import
end
end
+
+ context 'when distribute_github_parallel_import feature flag is disabled' do
+ before do
+ stub_feature_flags(distribute_github_parallel_import: false)
+ end
+
+ it 'imports data in parallel' do
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(worker_class)
+ .to receive(:perform_async)
+ .with(project.id, { title: 'Foo' }, an_instance_of(String))
+
+ expect(importer.parallel_import)
+ .to be_an_instance_of(Gitlab::JobWaiter)
+ end
+ end
end
describe '#each_object_to_import' do
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index b5a1eada7c5..8a919a0a72e 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -166,7 +166,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(described_class.usage_activity_by_stage_create({})).to include(
deploy_keys: 2,
keys: 2,
- merge_requests: 2,
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 6,
remote_mirrors: 2,
@@ -175,7 +174,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(described_class.usage_activity_by_stage_create(described_class.monthly_time_range_db_params)).to include(
deploy_keys: 1,
keys: 1,
- merge_requests: 1,
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 3,
remote_mirrors: 1,
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index 90a06b80f9c..775cccd2aec 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -21,10 +21,12 @@ RSpec.describe Board do
end
describe '#order_by_name_asc' do
+ # rubocop:disable RSpec/VariableName
let!(:board_B) { create(:board, project: project, name: 'B') }
let!(:board_C) { create(:board, project: project, name: 'C') }
let!(:board_a) { create(:board, project: project, name: 'a') }
let!(:board_A) { create(:board, project: project, name: 'A') }
+ # rubocop:enable RSpec/VariableName
it 'returns in case-insensitive alphabetical order and then by ascending id' do
expect(project.boards.order_by_name_asc).to eq [board_a, board_A, board_B, board_C]
@@ -32,10 +34,12 @@ RSpec.describe Board do
end
describe '#first_board' do
+ # rubocop:disable RSpec/VariableName
let!(:board_B) { create(:board, project: project, name: 'B') }
let!(:board_C) { create(:board, project: project, name: 'C') }
let!(:board_a) { create(:board, project: project, name: 'a') }
let!(:board_A) { create(:board, project: project, name: 'A') }
+ # rubocop:enable RSpec/VariableName
it 'return the first case-insensitive alphabetical board as a relation' do
expect(project.boards.first_board).to eq [board_a]
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 2b046142b59..24c318d0218 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -279,6 +279,15 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.order_expired_asc' do
+ let_it_be(:first_artifact) { create(:ci_job_artifact, expire_at: 2.days.ago) }
+ let_it_be(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
+ it 'returns ordered artifacts' do
+ expect(described_class.order_expired_asc).to eq([first_artifact, second_artifact])
+ end
+ end
+
describe '.for_project' do
it 'returns artifacts only for given project(s)', :aggregate_failures do
artifact1 = create(:ci_job_artifact)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 1ad211b0c81..1a0e0274539 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -536,7 +536,7 @@ RSpec.describe Project, factory_default: :keep do
project = build(:project)
aggregate_failures do
- urls_with_CRLF.each do |url|
+ urls_with_crlf.each do |url|
project.import_url = url
expect(project).not_to be_valid
@@ -549,7 +549,7 @@ RSpec.describe Project, factory_default: :keep do
project = build(:project)
aggregate_failures do
- valid_urls_with_CRLF.each do |url|
+ valid_urls_with_crlf.each do |url|
project.import_url = url
expect(project).to be_valid
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index b1183bb10fa..aea0b5c2a81 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -2554,14 +2554,32 @@ RSpec.describe API::MergeRequests do
expect(response).to have_gitlab_http_status(:ok)
end
- it "returns 406 if branch can't be merged" do
- allow_any_instance_of(MergeRequest)
- .to receive(:can_be_merged?).and_return(false)
+ context 'when change_response_code_merge_status is enabled' do
+ it "returns 422 if branch can't be merged" do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:can_be_merged?).and_return(false)
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(:not_acceptable)
- expect(json_response['message']).to eq('Branch cannot be merged')
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('Branch cannot be merged')
+ end
+ end
+
+ context 'when change_response_code_merge_status is disabled' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ end
+
+ it "returns 406 if branch can't be merged" do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:can_be_merged?).and_return(false)
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+
+ expect(response).to have_gitlab_http_status(:not_acceptable)
+ expect(json_response['message']).to eq('Branch cannot be merged')
+ end
end
it "returns 405 if merge_request is not open" do
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 581efd331ef..ac125518946 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -182,4 +182,40 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
end
end
+
+ describe '#mergeable_discussions_state?' do
+ context 'when change_response_code_merge_status is true' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: true)
+ end
+
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(true)
+ end
+ end
+
+ context 'when change_response_code_merge_status is false' do
+ context 'when merge request is in a mergeable state' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ allow(resource).to receive(:mergeable_discussions_state?).and_return(true)
+ end
+
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(true)
+ end
+ end
+
+ context 'when merge request is not in a mergeable state' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ allow(resource).to receive(:mergeable_discussions_state?).and_return(false)
+ end
+
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(false)
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/ci/job_artifacts/update_unknown_locked_status_service_spec.rb b/spec/services/ci/job_artifacts/update_unknown_locked_status_service_spec.rb
new file mode 100644
index 00000000000..67412e41fb8
--- /dev/null
+++ b/spec/services/ci/job_artifacts/update_unknown_locked_status_service_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::UpdateUnknownLockedStatusService, :clean_gitlab_redis_shared_state do
+ include ExclusiveLeaseHelpers
+
+ let(:service) { described_class.new }
+
+ describe '.execute' do
+ subject { service.execute }
+
+ let_it_be(:locked_pipeline) { create(:ci_pipeline, :artifacts_locked) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :unlocked) }
+ let_it_be(:locked_job) { create(:ci_build, :success, pipeline: locked_pipeline) }
+ let_it_be(:job) { create(:ci_build, :success, pipeline: pipeline) }
+
+ let!(:unknown_unlocked_artifact) do
+ create(:ci_job_artifact, :junit, expire_at: 1.hour.ago, job: job, locked: Ci::JobArtifact.lockeds[:unknown])
+ end
+
+ let!(:unknown_locked_artifact) do
+ create(:ci_job_artifact, :lsif,
+ expire_at: 1.day.ago,
+ job: locked_job,
+ locked: Ci::JobArtifact.lockeds[:unknown]
+ )
+ end
+
+ let!(:unlocked_artifact) do
+ create(:ci_job_artifact, :archive, expire_at: 1.hour.ago, job: job, locked: Ci::JobArtifact.lockeds[:unlocked])
+ end
+
+ let!(:locked_artifact) do
+ create(:ci_job_artifact, :sast, :raw,
+ expire_at: 1.day.ago,
+ job: locked_job,
+ locked: Ci::JobArtifact.lockeds[:artifacts_locked]
+ )
+ end
+
+ context 'when artifacts are expired' do
+ it 'sets artifact_locked when the pipeline is locked' do
+ expect { service.execute }
+ .to change { unknown_locked_artifact.reload.locked }.from('unknown').to('artifacts_locked')
+ .and not_change { Ci::JobArtifact.exists?(locked_artifact.id) }
+ end
+
+ it 'destroys the artifact when the pipeline is unlocked' do
+ expect { subject }.to change { Ci::JobArtifact.exists?(unknown_unlocked_artifact.id) }.from(true).to(false)
+ end
+
+ it 'does not update ci_job_artifact rows with known locked values' do
+ expect { service.execute }
+ .to not_change(locked_artifact, :attributes)
+ .and not_change { Ci::JobArtifact.exists?(locked_artifact.id) }
+ .and not_change(unlocked_artifact, :attributes)
+ .and not_change { Ci::JobArtifact.exists?(unlocked_artifact.id) }
+ end
+
+ it 'logs the counts of affected artifacts' do
+ expect(subject).to eq({ removed: 1, locked: 1 })
+ end
+ end
+
+ context 'in a single iteration' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ context 'due to the LOOP_TIMEOUT' do
+ before do
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ end
+
+ it 'affects the earliest expired artifact first' do
+ subject
+
+ expect(unknown_locked_artifact.reload.locked).to eq('artifacts_locked')
+ expect(unknown_unlocked_artifact.reload.locked).to eq('unknown')
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq({ removed: 0, locked: 1 })
+ end
+ end
+
+ context 'due to @loop_limit' do
+ before do
+ stub_const("#{described_class}::LARGE_LOOP_LIMIT", 1)
+ end
+
+ it 'affects the most recently expired artifact first' do
+ subject
+
+ expect(unknown_locked_artifact.reload.locked).to eq('artifacts_locked')
+ expect(unknown_unlocked_artifact.reload.locked).to eq('unknown')
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq({ removed: 0, locked: 1 })
+ end
+ end
+ end
+
+ context 'when artifact is not expired' do
+ let!(:unknown_unlocked_artifact) do
+ create(:ci_job_artifact, :junit,
+ expire_at: 1.year.from_now,
+ job: job,
+ locked: Ci::JobArtifact.lockeds[:unknown]
+ )
+ end
+
+ it 'does not change the locked status' do
+ expect { service.execute }.not_to change { unknown_unlocked_artifact.locked }
+ expect(Ci::JobArtifact.exists?(unknown_unlocked_artifact.id)).to eq(true)
+ end
+ end
+
+ context 'when exclusive lease has already been taken by the other instance' do
+ before do
+ stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY, timeout: described_class::LOCK_TIMEOUT)
+ end
+
+ it 'raises an error and' do
+ expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+
+ context 'when there are no unknown status artifacts' do
+ before do
+ Ci::JobArtifact.update_all(locked: :unlocked)
+ end
+
+ it 'does not raise error' do
+ expect { subject }.not_to raise_error
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq({ removed: 0, locked: 0 })
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/url_shared_context.rb b/spec/support/shared_contexts/url_shared_context.rb
index da1d6e0049c..0e1534bf6c7 100644
--- a/spec/support/shared_contexts/url_shared_context.rb
+++ b/spec/support/shared_contexts/url_shared_context.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_context 'valid urls with CRLF' do
- let(:valid_urls_with_CRLF) do
+ let(:valid_urls_with_crlf) do
[
"http://example.com/pa%0dth",
"http://example.com/pa%0ath",
@@ -16,7 +16,7 @@ RSpec.shared_context 'valid urls with CRLF' do
end
RSpec.shared_context 'invalid urls' do
- let(:urls_with_CRLF) do
+ let(:urls_with_crlf) do
[
"git://example.com/pa%0dth",
"git://example.com/pa%0ath",
diff --git a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
index 01ed6c26576..da9d254039b 100644
--- a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
@@ -54,11 +54,13 @@ RSpec.shared_examples 'group and project boards query' do
end
context 'when using default sorting' do
+ # rubocop:disable RSpec/VariableName
let!(:board_B) { create(:board, resource_parent: board_parent, name: 'B') }
let!(:board_C) { create(:board, resource_parent: board_parent, name: 'C') }
let!(:board_a) { create(:board, resource_parent: board_parent, name: 'a') }
let!(:board_A) { create(:board, resource_parent: board_parent, name: 'A') }
let(:boards) { [board_a, board_A, board_B, board_C] }
+ # rubocop:enable RSpec/VariableName
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
diff --git a/spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb
index 0e2bddc19ab..fd832d4484d 100644
--- a/spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/boards_list_service_shared_examples.rb
@@ -13,10 +13,12 @@ RSpec.shared_examples 'boards list service' do
end
RSpec.shared_examples 'multiple boards list service' do
+ # rubocop:disable RSpec/VariableName
let(:service) { described_class.new(parent, double) }
let!(:board_B) { create(:board, resource_parent: parent, name: 'B-board') }
let!(:board_c) { create(:board, resource_parent: parent, name: 'c-board') }
let!(:board_a) { create(:board, resource_parent: parent, name: 'a-board') }
+ # rubocop:enable RSpec/VariableName
describe '#execute' do
it 'returns all issue boards' do
diff --git a/spec/validators/addressable_url_validator_spec.rb b/spec/validators/addressable_url_validator_spec.rb
index 7e2cc2afa8a..b3a4459db30 100644
--- a/spec/validators/addressable_url_validator_spec.rb
+++ b/spec/validators/addressable_url_validator_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe AddressableUrlValidator do
it 'allows urls with encoded CR or LF characters' do
aggregate_failures do
- valid_urls_with_CRLF.each do |url|
+ valid_urls_with_crlf.each do |url|
validator.validate_each(badge, :link_url, url)
expect(badge.errors).to be_empty
@@ -40,7 +40,7 @@ RSpec.describe AddressableUrlValidator do
it 'does not allow urls with CR or LF characters' do
aggregate_failures do
- urls_with_CRLF.each do |url|
+ urls_with_crlf.each do |url|
badge = build(:badge, link_url: 'http://www.example.com')
validator.validate_each(badge, :link_url, url)
diff --git a/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb b/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb
new file mode 100644
index 00000000000..b42d135b1b6
--- /dev/null
+++ b/spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UpdateLockedUnknownArtifactsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'executes an instance of Ci::JobArtifacts::UpdateUnknownLockedStatusService' do
+ expect_next_instance_of(Ci::JobArtifacts::UpdateUnknownLockedStatusService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:removed_count, 0)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:locked_count, 0)
+
+ worker.perform
+ end
+
+ context 'with the ci_job_artifacts_backlog_work flag shut off' do
+ before do
+ stub_feature_flags(ci_job_artifacts_backlog_work: false)
+ end
+
+ it 'does not instantiate a new Ci::JobArtifacts::UpdateUnknownLockedStatusService' do
+ expect(Ci::JobArtifacts::UpdateUnknownLockedStatusService).not_to receive(:new)
+
+ worker.perform
+ end
+
+ it 'does not log any artifact counts' do
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ worker.perform
+ end
+
+ it 'does not query the database' do
+ query_count = ActiveRecord::QueryRecorder.new { worker.perform }.count
+
+ expect(query_count).to eq(0)
+ end
+ end
+ end
+end
diff --git a/yarn.lock b/yarn.lock
index d9a323d037d..1b2b09927a0 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -12776,10 +12776,10 @@ webpack-dev-middleware@^5.3.1:
range-parser "^1.2.1"
schema-utils "^4.0.0"
-webpack-dev-server@4.8.0:
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.8.0.tgz#022bb845946e31ca01527509a942869ecfc7e047"
- integrity sha512-yZ7OWVP1nOtv8s10R/ZCsH6zf6QKkNusMRBE9DsQbOknRzKaFYYrbwVPCXp8ynUOTt3RlD9szM8H0pUlrJ6wcw==
+webpack-dev-server@4.8.1:
+ version "4.8.1"
+ resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.8.1.tgz#58f9d797710d6e25fa17d6afab8708f958c11a29"
+ integrity sha512-dwld70gkgNJa33czmcj/PlKY/nOy/BimbrgZRaR9vDATBQAYgLzggR0nxDtPLJiLrMgZwbE6RRfJ5vnBBasTyg==
dependencies:
"@types/bonjour" "^3.5.9"
"@types/connect-history-api-fallback" "^1.3.5"