diff options
author | GitLab Bot <gitlab-bot@gitlab.com> | 2021-06-16 18:10:35 +0000 |
---|---|---|
committer | GitLab Bot <gitlab-bot@gitlab.com> | 2021-06-16 18:10:35 +0000 |
commit | 3597fb6d337baab5847863feedc98b433fb4000c (patch) | |
tree | 7c2cf77f03fc2dc43af4198bdddc75221edacaac | |
parent | ec4abad65d774cfc94110577589d44de5da825de (diff) | |
download | gitlab-ce-3597fb6d337baab5847863feedc98b433fb4000c.tar.gz |
Add latest changes from gitlab-org/gitlab@master
113 files changed, 2098 insertions, 538 deletions
diff --git a/.gitlab/merge_request_templates/Pipeline Configuration.md b/.gitlab/merge_request_templates/Pipeline Configuration.md new file mode 100644 index 00000000000..920abf086cb --- /dev/null +++ b/.gitlab/merge_request_templates/Pipeline Configuration.md @@ -0,0 +1,38 @@ +<!-- See Pipelines for the GitLab project: https://docs.gitlab.com/ee/development/pipelines.html --> +<!-- When in doubt about a Pipeline configuration change, feel free to ping @gl-quality/eng-prod. --> + +## What does this MR do? + +<!-- Briefly describe what this MR is about --> + +## Related issues + +<!-- Link related issues below. --> + +## Check-list + +### Pre-merge + +Consider the effect of the changes in this merge request on the following: + +- [ ] Different [pipeline types](https://docs.gitlab.com/ee/development/pipelines.html#pipelines-for-merge-requests) +- Non-canonical projects: + - [ ] `gitlab-foss` + - [ ] `security` + - [ ] `dev` + - [ ] personal forks +- [ ] [Pipeline performance](https://about.gitlab.com/handbook/engineering/quality/performance-indicators/#average-merge-request-pipeline-duration-for-gitlab) + +**If new jobs are added:** + +- [ ] Change-related rules (e.g. 
frontend/backend/database file changes): _____ +- [ ] Frequency they are running (MRs, main branch, nightly, bi-hourly): _____ +- [ ] Add a duration chart to https://app.periscopedata.com/app/gitlab/652085/Engineering-Productivity---Pipeline-Build-Durations if there are new jobs added to merge request pipelines + +This will help keep track of expected cost increases to the [GitLab project average pipeline cost per merge request](https://about.gitlab.com/handbook/engineering/quality/performance-indicators/#gitlab-project-average-pipeline-cost-per-merge-request) RPI + +### Post-merge + +- [ ] Consider communicating these changes to the broader team following the [communication guideline for pipeline changes](https://about.gitlab.com/handbook/engineering/quality/engineering-productivity-team/#pipeline-changes) + +/label ~tooling ~"tooling::pipelines" ~"Engineering Productivity" diff --git a/.rubocop_manual_todo.yml b/.rubocop_manual_todo.yml index dcd6dcf4062..dae8084b656 100644 --- a/.rubocop_manual_todo.yml +++ b/.rubocop_manual_todo.yml @@ -13,7 +13,6 @@ # WIP See https://gitlab.com/gitlab-org/gitlab/-/issues/322903 Graphql/Descriptions: Exclude: - - 'ee/app/graphql/ee/types/list_limit_metric_enum.rb' - 'ee/app/graphql/types/epic_state_enum.rb' - 'ee/app/graphql/types/health_status_enum.rb' - 'ee/app/graphql/types/iteration_state_enum.rb' diff --git a/app/assets/javascripts/content_editor/components/top_toolbar.vue b/app/assets/javascripts/content_editor/components/top_toolbar.vue index 07fdd3147e2..d3363ce092b 100644 --- a/app/assets/javascripts/content_editor/components/top_toolbar.vue +++ b/app/assets/javascripts/content_editor/components/top_toolbar.vue @@ -64,6 +64,15 @@ export default { @execute="trackToolbarControlExecution" /> <toolbar-button + data-testid="strike" + content-type="strike" + icon-name="strikethrough" + editor-command="toggleStrike" + :label="__('Strikethrough')" + :tiptap-editor="contentEditor.tiptapEditor" + 
@execute="trackToolbarControlExecution" + /> + <toolbar-button data-testid="code" content-type="code" icon-name="code" diff --git a/app/assets/javascripts/content_editor/extensions/strike.js b/app/assets/javascripts/content_editor/extensions/strike.js new file mode 100644 index 00000000000..6f228e00994 --- /dev/null +++ b/app/assets/javascripts/content_editor/extensions/strike.js @@ -0,0 +1,9 @@ +import { Strike } from '@tiptap/extension-strike'; + +export const tiptapExtension = Strike; +export const serializer = { + open: '~~', + close: '~~', + mixable: true, + expelEnclosingWhitespace: true, +}; diff --git a/app/assets/javascripts/content_editor/services/create_content_editor.js b/app/assets/javascripts/content_editor/services/create_content_editor.js index df45287e6cb..8a54da6f57d 100644 --- a/app/assets/javascripts/content_editor/services/create_content_editor.js +++ b/app/assets/javascripts/content_editor/services/create_content_editor.js @@ -19,6 +19,7 @@ import * as Link from '../extensions/link'; import * as ListItem from '../extensions/list_item'; import * as OrderedList from '../extensions/ordered_list'; import * as Paragraph from '../extensions/paragraph'; +import * as Strike from '../extensions/strike'; import * as Text from '../extensions/text'; import buildSerializerConfig from './build_serializer_config'; import { ContentEditor } from './content_editor'; @@ -44,6 +45,7 @@ const builtInContentEditorExtensions = [ ListItem, OrderedList, Paragraph, + Strike, Text, ]; diff --git a/app/assets/javascripts/issuable_list/components/issuable_item.vue b/app/assets/javascripts/issuable_list/components/issuable_item.vue index 348dc054f57..20d1dce3905 100644 --- a/app/assets/javascripts/issuable_list/components/issuable_item.vue +++ b/app/assets/javascripts/issuable_list/components/issuable_item.vue @@ -50,6 +50,9 @@ export default { }, }, computed: { + issuableId() { + return getIdFromGraphQLId(this.issuable.id); + }, createdInPastDay() { const 
createdSecondsAgo = differenceInSeconds(new Date(this.issuable.createdAt), new Date()); return createdSecondsAgo < SECONDS_IN_DAY; @@ -61,7 +64,7 @@ export default { return this.issuable.gitlabWebUrl || this.issuable.webUrl; }, authorId() { - return getIdFromGraphQLId(`${this.author.id}`); + return getIdFromGraphQLId(this.author.id); }, isIssuableUrlExternal() { return isExternal(this.webUrl); @@ -70,10 +73,10 @@ export default { return this.issuable.labels?.nodes || this.issuable.labels || []; }, labelIdsString() { - return JSON.stringify(this.labels.map((label) => label.id)); + return JSON.stringify(this.labels.map((label) => getIdFromGraphQLId(label.id))); }, assignees() { - return this.issuable.assignees || []; + return this.issuable.assignees?.nodes || this.issuable.assignees || []; }, createdAt() { return sprintf(__('created %{timeAgo}'), { @@ -157,7 +160,7 @@ export default { <template> <li - :id="`issuable_${issuable.id}`" + :id="`issuable_${issuableId}`" class="issue gl-px-5!" :class="{ closed: issuable.closedAt, today: createdInPastDay }" :data-labels="labelIdsString" @@ -167,7 +170,7 @@ export default { <gl-form-checkbox class="gl-mr-0" :checked="checked" - :data-id="issuable.id" + :data-id="issuableId" @input="$emit('checked-input', $event)" > <span class="gl-sr-only">{{ issuable.title }}</span> diff --git a/app/assets/javascripts/issuable_list/components/issuable_list_root.vue b/app/assets/javascripts/issuable_list/components/issuable_list_root.vue index 45584205be0..a19c76cfe3f 100644 --- a/app/assets/javascripts/issuable_list/components/issuable_list_root.vue +++ b/app/assets/javascripts/issuable_list/components/issuable_list_root.vue @@ -1,7 +1,7 @@ <script> -import { GlSkeletonLoading, GlPagination } from '@gitlab/ui'; +import { GlKeysetPagination, GlSkeletonLoading, GlPagination } from '@gitlab/ui'; import { uniqueId } from 'lodash'; - +import { getIdFromGraphQLId } from '~/graphql_shared/utils'; import { updateHistory, setUrlParams } from 
'~/lib/utils/url_utility'; import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue'; @@ -19,6 +19,7 @@ export default { tag: 'ul', }, components: { + GlKeysetPagination, GlSkeletonLoading, IssuableTabs, FilteredSearchBar, @@ -140,6 +141,21 @@ export default { required: false, default: false, }, + useKeysetPagination: { + type: Boolean, + required: false, + default: false, + }, + hasNextPage: { + type: Boolean, + required: false, + default: false, + }, + hasPreviousPage: { + type: Boolean, + required: false, + default: false, + }, }, data() { return { @@ -211,7 +227,7 @@ export default { }, methods: { issuableId(issuable) { - return issuable.id || issuable.iid || uniqueId(); + return getIdFromGraphQLId(issuable.id) || issuable.iid || uniqueId(); }, issuableChecked(issuable) { return this.checkedIssuables[this.issuableId(issuable)]?.checked; @@ -315,8 +331,16 @@ export default { <slot v-else name="empty-state"></slot> </template> + <div v-if="showPaginationControls && useKeysetPagination" class="gl-text-center gl-mt-3"> + <gl-keyset-pagination + :has-next-page="hasNextPage" + :has-previous-page="hasPreviousPage" + @next="$emit('next-page')" + @prev="$emit('previous-page')" + /> + </div> <gl-pagination - v-if="showPaginationControls" + v-else-if="showPaginationControls" :per-page="defaultPageSize" :total-items="totalItems" :value="currentPage" diff --git a/app/assets/javascripts/issues_list/components/issue_card_time_info.vue b/app/assets/javascripts/issues_list/components/issue_card_time_info.vue index 8d00d337bac..70d73aca925 100644 --- a/app/assets/javascripts/issues_list/components/issue_card_time_info.vue +++ b/app/assets/javascripts/issues_list/components/issue_card_time_info.vue @@ -42,6 +42,9 @@ export default { } return __('Milestone'); }, + milestoneLink() { + return this.issue.milestone.webPath || this.issue.milestone.webUrl; + }, dueDate() { return this.issue.dueDate && dateInWords(new 
Date(this.issue.dueDate), true); }, @@ -49,7 +52,7 @@ export default { return isInPast(new Date(this.issue.dueDate)); }, timeEstimate() { - return this.issue.timeStats?.humanTimeEstimate; + return this.issue.humanTimeEstimate || this.issue.timeStats?.humanTimeEstimate; }, showHealthStatus() { return this.hasIssuableHealthStatusFeature && this.issue.healthStatus; @@ -85,7 +88,7 @@ export default { class="issuable-milestone gl-display-none gl-sm-display-inline-block! gl-mr-3" data-testid="issuable-milestone" > - <gl-link v-gl-tooltip :href="issue.milestone.webUrl" :title="milestoneDate"> + <gl-link v-gl-tooltip :href="milestoneLink" :title="milestoneDate"> <gl-icon name="clock" /> {{ issue.milestone.title }} </gl-link> diff --git a/app/assets/javascripts/issues_list/components/issues_list_app.vue b/app/assets/javascripts/issues_list/components/issues_list_app.vue index d5cab77f26c..dbf7717b248 100644 --- a/app/assets/javascripts/issues_list/components/issues_list_app.vue +++ b/app/assets/javascripts/issues_list/components/issues_list_app.vue @@ -9,7 +9,7 @@ import { GlTooltipDirective, } from '@gitlab/ui'; import fuzzaldrinPlus from 'fuzzaldrin-plus'; -import { toNumber } from 'lodash'; +import getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql'; import createFlash from '~/flash'; import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue'; import IssuableByEmail from '~/issuable/components/issuable_by_email.vue'; @@ -17,13 +17,12 @@ import IssuableList from '~/issuable_list/components/issuable_list_root.vue'; import { IssuableListTabs, IssuableStates } from '~/issuable_list/constants'; import { API_PARAM, - apiSortParams, CREATED_DESC, i18n, + initialPageParams, MAX_LIST_SIZE, PAGE_SIZE, PARAM_DUE_DATE, - PARAM_PAGE, PARAM_SORT, PARAM_STATE, RELATIVE_POSITION_DESC, @@ -49,7 +48,8 @@ import { getSortOptions, } from '~/issues_list/utils'; import axios from '~/lib/utils/axios_utils'; -import { 
convertObjectPropsToCamelCase, getParameterByName } from '~/lib/utils/common_utils'; +import { getParameterByName } from '~/lib/utils/common_utils'; +import { scrollUp } from '~/lib/utils/scroll_utils'; import { DEFAULT_NONE_ANY, OPERATOR_IS_ONLY, @@ -107,9 +107,6 @@ export default { emptyStateSvgPath: { default: '', }, - endpoint: { - default: '', - }, exportCsvPath: { default: '', }, @@ -173,15 +170,43 @@ export default { dueDateFilter: getDueDateValue(getParameterByName(PARAM_DUE_DATE)), exportCsvPathWithQuery: this.getExportCsvPathWithQuery(), filterTokens: getFilterTokens(window.location.search), - isLoading: false, issues: [], - page: toNumber(getParameterByName(PARAM_PAGE)) || 1, + pageInfo: {}, + pageParams: initialPageParams, showBulkEditSidebar: false, sortKey: getSortKey(getParameterByName(PARAM_SORT)) || defaultSortKey, state: state || IssuableStates.Opened, totalIssues: 0, }; }, + apollo: { + issues: { + query: getIssuesQuery, + variables() { + return { + projectPath: this.projectPath, + search: this.searchQuery, + sort: this.sortKey, + state: this.state, + ...this.pageParams, + ...this.apiFilterParams, + }; + }, + update: ({ project }) => project.issues.nodes, + result({ data }) { + this.pageInfo = data.project.issues.pageInfo; + this.totalIssues = data.project.issues.count; + this.exportCsvPathWithQuery = this.getExportCsvPathWithQuery(); + }, + error(error) { + createFlash({ message: this.$options.i18n.errorFetchingIssues, captureError: true, error }); + }, + skip() { + return !this.hasProjectIssues; + }, + debounce: 200, + }, + }, computed: { hasSearch() { return this.searchQuery || Object.keys(this.urlFilterParams).length; @@ -348,7 +373,6 @@ export default { return { due_date: this.dueDateFilter, - page: this.page, search: this.searchQuery, state: this.state, ...urlSortParams[this.sortKey], @@ -361,7 +385,6 @@ export default { }, mounted() { eventHub.$on('issuables:toggleBulkEdit', this.toggleBulkEditSidebar); - this.fetchIssues(); }, 
beforeDestroy() { eventHub.$off('issuables:toggleBulkEdit', this.toggleBulkEditSidebar); @@ -406,54 +429,11 @@ export default { fetchUsers(search) { return axios.get(this.autocompleteUsersPath, { params: { search } }); }, - fetchIssues() { - if (!this.hasProjectIssues) { - return undefined; - } - - this.isLoading = true; - - const filterParams = { - ...this.apiFilterParams, - }; - - if (filterParams.epic_id) { - filterParams.epic_id = filterParams.epic_id.split('::&').pop(); - } else if (filterParams['not[epic_id]']) { - filterParams['not[epic_id]'] = filterParams['not[epic_id]'].split('::&').pop(); - } - - return axios - .get(this.endpoint, { - params: { - due_date: this.dueDateFilter, - page: this.page, - per_page: PAGE_SIZE, - search: this.searchQuery, - state: this.state, - with_labels_details: true, - ...apiSortParams[this.sortKey], - ...filterParams, - }, - }) - .then(({ data, headers }) => { - this.page = Number(headers['x-page']); - this.totalIssues = Number(headers['x-total']); - this.issues = data.map((issue) => convertObjectPropsToCamelCase(issue, { deep: true })); - this.exportCsvPathWithQuery = this.getExportCsvPathWithQuery(); - }) - .catch(() => { - createFlash({ message: this.$options.i18n.errorFetchingIssues }); - }) - .finally(() => { - this.isLoading = false; - }); - }, getExportCsvPathWithQuery() { return `${this.exportCsvPath}${window.location.search}`; }, getStatus(issue) { - if (issue.closedAt && issue.movedToId) { + if (issue.closedAt && issue.moved) { return this.$options.i18n.closedMoved; } if (issue.closedAt) { @@ -484,18 +464,26 @@ export default { }, handleClickTab(state) { if (this.state !== state) { - this.page = 1; + this.pageParams = initialPageParams; } this.state = state; - this.fetchIssues(); }, handleFilter(filter) { this.filterTokens = filter; - this.fetchIssues(); }, - handlePageChange(page) { - this.page = page; - this.fetchIssues(); + handleNextPage() { + this.pageParams = { + afterCursor: this.pageInfo.endCursor, + 
firstPageSize: PAGE_SIZE, + }; + scrollUp(); + }, + handlePreviousPage() { + this.pageParams = { + beforeCursor: this.pageInfo.startCursor, + lastPageSize: PAGE_SIZE, + }; + scrollUp(); }, handleReorder({ newIndex, oldIndex }) { const issueToMove = this.issues[oldIndex]; @@ -530,9 +518,11 @@ export default { createFlash({ message: this.$options.i18n.reorderError }); }); }, - handleSort(value) { - this.sortKey = value; - this.fetchIssues(); + handleSort(sortKey) { + if (this.sortKey !== sortKey) { + this.pageParams = initialPageParams; + } + this.sortKey = sortKey; }, toggleBulkEditSidebar(showBulkEditSidebar) { this.showBulkEditSidebar = showBulkEditSidebar; @@ -556,18 +546,18 @@ export default { :tabs="$options.IssuableListTabs" :current-tab="state" :tab-counts="tabCounts" - :issuables-loading="isLoading" + :issuables-loading="$apollo.queries.issues.loading" :is-manual-ordering="isManualOrdering" :show-bulk-edit-sidebar="showBulkEditSidebar" :show-pagination-controls="showPaginationControls" - :total-items="totalIssues" - :current-page="page" - :previous-page="page - 1" - :next-page="page + 1" + :use-keyset-pagination="true" + :has-next-page="pageInfo.hasNextPage" + :has-previous-page="pageInfo.hasPreviousPage" :url-params="urlParams" @click-tab="handleClickTab" @filter="handleFilter" - @page-change="handlePageChange" + @next-page="handleNextPage" + @previous-page="handlePreviousPage" @reorder="handleReorder" @sort="handleSort" @update-legacy-bulk-edit="handleUpdateLegacyBulkEdit" @@ -646,7 +636,7 @@ export default { </li> <blocking-issues-count class="gl-display-none gl-sm-display-block" - :blocking-issues-count="issuable.blockingIssuesCount" + :blocking-issues-count="issuable.blockedByCount" :is-list-item="true" /> </template> diff --git a/app/assets/javascripts/issues_list/constants.js b/app/assets/javascripts/issues_list/constants.js index 06e140d6420..76006f9081d 100644 --- a/app/assets/javascripts/issues_list/constants.js +++ 
b/app/assets/javascripts/issues_list/constants.js @@ -101,10 +101,13 @@ export const i18n = { export const JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY = 'jira-import-success-alert-hide-map'; export const PARAM_DUE_DATE = 'due_date'; -export const PARAM_PAGE = 'page'; export const PARAM_SORT = 'sort'; export const PARAM_STATE = 'state'; +export const initialPageParams = { + firstPageSize: PAGE_SIZE, +}; + export const DUE_DATE_NONE = '0'; export const DUE_DATE_ANY = ''; export const DUE_DATE_OVERDUE = 'overdue'; diff --git a/app/assets/javascripts/issues_list/index.js b/app/assets/javascripts/issues_list/index.js index d0c9462a3d7..97b9a9a115d 100644 --- a/app/assets/javascripts/issues_list/index.js +++ b/app/assets/javascripts/issues_list/index.js @@ -73,6 +73,13 @@ export function mountIssuesListApp() { return false; } + Vue.use(VueApollo); + + const defaultClient = createDefaultClient({}, { assumeImmutableResults: true }); + const apolloProvider = new VueApollo({ + defaultClient, + }); + const { autocompleteAwardEmojisPath, autocompleteUsersPath, @@ -83,7 +90,6 @@ export function mountIssuesListApp() { email, emailsHelpPagePath, emptyStateSvgPath, - endpoint, exportCsvPath, groupEpicsPath, hasBlockedIssuesFeature, @@ -113,16 +119,13 @@ export function mountIssuesListApp() { return new Vue({ el, - // Currently does not use Vue Apollo, but need to provide {} for now until the - // issue is fixed upstream in https://github.com/vuejs/vue-apollo/pull/1153 - apolloProvider: {}, + apolloProvider, provide: { autocompleteAwardEmojisPath, autocompleteUsersPath, calendarPath, canBulkUpdate: parseBoolean(canBulkUpdate), emptyStateSvgPath, - endpoint, groupEpicsPath, hasBlockedIssuesFeature: parseBoolean(hasBlockedIssuesFeature), hasIssuableHealthStatusFeature: parseBoolean(hasIssuableHealthStatusFeature), diff --git a/app/assets/javascripts/issues_list/queries/get_issues.query.graphql b/app/assets/javascripts/issues_list/queries/get_issues.query.graphql new file mode 100644 index 
00000000000..afd53084ca0 --- /dev/null +++ b/app/assets/javascripts/issues_list/queries/get_issues.query.graphql @@ -0,0 +1,45 @@ +#import "~/graphql_shared/fragments/pageInfo.fragment.graphql" +#import "./issue.fragment.graphql" + +query getProjectIssues( + $projectPath: ID! + $search: String + $sort: IssueSort + $state: IssuableState + $assigneeId: String + $assigneeUsernames: [String!] + $authorUsername: String + $labelName: [String] + $milestoneTitle: [String] + $not: NegatedIssueFilterInput + $beforeCursor: String + $afterCursor: String + $firstPageSize: Int + $lastPageSize: Int +) { + project(fullPath: $projectPath) { + issues( + search: $search + sort: $sort + state: $state + assigneeId: $assigneeId + assigneeUsernames: $assigneeUsernames + authorUsername: $authorUsername + labelName: $labelName + milestoneTitle: $milestoneTitle + not: $not + before: $beforeCursor + after: $afterCursor + first: $firstPageSize + last: $lastPageSize + ) { + count + pageInfo { + ...PageInfo + } + nodes { + ...IssueFragment + } + } + } +} diff --git a/app/assets/javascripts/issues_list/queries/issue.fragment.graphql b/app/assets/javascripts/issues_list/queries/issue.fragment.graphql new file mode 100644 index 00000000000..de30d8b4bf6 --- /dev/null +++ b/app/assets/javascripts/issues_list/queries/issue.fragment.graphql @@ -0,0 +1,51 @@ +fragment IssueFragment on Issue { + id + iid + closedAt + confidential + createdAt + downvotes + dueDate + humanTimeEstimate + moved + title + updatedAt + upvotes + userDiscussionsCount + webUrl + assignees { + nodes { + id + avatarUrl + name + username + webUrl + } + } + author { + id + avatarUrl + name + username + webUrl + } + labels { + nodes { + id + color + title + description + } + } + milestone { + id + dueDate + startDate + webPath + title + } + taskCompletionStatus { + completedCount + count + } +} diff --git a/app/assets/javascripts/jira_connect/index.js b/app/assets/javascripts/jira_connect/index.js index dc8bb3b0c77..bc0d21c6c9a 
100644 --- a/app/assets/javascripts/jira_connect/index.js +++ b/app/assets/javascripts/jira_connect/index.js @@ -1,3 +1,5 @@ +import '../webpack'; + import setConfigs from '@gitlab/ui/dist/config'; import Vue from 'vue'; import { getLocation, sizeToParent } from '~/jira_connect/utils'; diff --git a/app/assets/javascripts/performance_bar/index.js b/app/assets/javascripts/performance_bar/index.js index d8aab25a6a8..66e999ca43b 100644 --- a/app/assets/javascripts/performance_bar/index.js +++ b/app/assets/javascripts/performance_bar/index.js @@ -1,3 +1,5 @@ +import '../webpack'; + import Vue from 'vue'; import axios from '~/lib/utils/axios_utils'; import { numberToHumanSize } from '~/lib/utils/number_utils'; diff --git a/app/assets/javascripts/runner/components/runner_manual_setup_help.vue b/app/assets/javascripts/runner/components/runner_manual_setup_help.vue index 4755977b051..426d377c92b 100644 --- a/app/assets/javascripts/runner/components/runner_manual_setup_help.vue +++ b/app/assets/javascripts/runner/components/runner_manual_setup_help.vue @@ -1,8 +1,10 @@ <script> import { GlLink, GlSprintf, GlTooltipDirective } from '@gitlab/ui'; -import { __ } from '~/locale'; +import { s__ } from '~/locale'; +import RunnerRegistrationTokenReset from '~/runner/components/runner_registration_token_reset.vue'; import ClipboardButton from '~/vue_shared/components/clipboard_button.vue'; import RunnerInstructions from '~/vue_shared/components/runner_instructions/runner_instructions.vue'; +import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '../constants'; export default { components: { @@ -10,6 +12,7 @@ export default { GlSprintf, ClipboardButton, RunnerInstructions, + RunnerRegistrationTokenReset, }, directives: { GlTooltip: GlTooltipDirective, @@ -24,16 +27,40 @@ export default { type: String, required: true, }, - typeName: { + type: { type: String, - required: false, - default: __('shared'), + required: true, + validator(type) { + return [INSTANCE_TYPE, GROUP_TYPE, 
PROJECT_TYPE].includes(type); + }, }, }, + data() { + return { + currentRegistrationToken: this.registrationToken, + }; + }, computed: { rootUrl() { return gon.gitlab_url || ''; }, + typeName() { + switch (this.type) { + case INSTANCE_TYPE: + return s__('Runners|shared'); + case GROUP_TYPE: + return s__('Runners|group'); + case PROJECT_TYPE: + return s__('Runners|specific'); + default: + return ''; + } + }, + }, + methods: { + onTokenReset(token) { + this.currentRegistrationToken = token; + }, }, }; </script> @@ -65,12 +92,13 @@ export default { {{ __('And this registration token:') }} <br /> - <code data-testid="registration-token">{{ registrationToken }}</code> - <clipboard-button :title="__('Copy token')" :text="registrationToken" /> + <code data-testid="registration-token">{{ currentRegistrationToken }}</code> + <clipboard-button :title="__('Copy token')" :text="currentRegistrationToken" /> </li> </ol> - <!-- TODO Implement reset token functionality --> + <runner-registration-token-reset :type="type" @tokenReset="onTokenReset" /> + <runner-instructions /> </div> </template> diff --git a/app/assets/javascripts/runner/components/runner_registration_token_reset.vue b/app/assets/javascripts/runner/components/runner_registration_token_reset.vue new file mode 100644 index 00000000000..b03574264d9 --- /dev/null +++ b/app/assets/javascripts/runner/components/runner_registration_token_reset.vue @@ -0,0 +1,83 @@ +<script> +import { GlButton } from '@gitlab/ui'; +import createFlash, { FLASH_TYPES } from '~/flash'; +import { __, s__ } from '~/locale'; +import runnersRegistrationTokenResetMutation from '~/runner/graphql/runners_registration_token_reset.mutation.graphql'; +import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '../constants'; + +export default { + components: { + GlButton, + }, + props: { + type: { + type: String, + required: true, + validator(type) { + return [INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE].includes(type); + }, + }, + }, + data() { + return { + 
loading: false, + }; + }, + computed: {}, + methods: { + async resetToken() { + // TODO Replace confirmation with gl-modal + // See: https://gitlab.com/gitlab-org/gitlab/-/issues/333810 + // eslint-disable-next-line no-alert + if (!window.confirm(__('Are you sure you want to reset the registration token?'))) { + return; + } + + this.loading = true; + try { + const { + data: { + runnersRegistrationTokenReset: { token, errors }, + }, + } = await this.$apollo.mutate({ + mutation: runnersRegistrationTokenResetMutation, + variables: { + // TODO Currently INTANCE_TYPE only is supported + // In future iterations this component will support + // other registration token types. + // See: https://gitlab.com/gitlab-org/gitlab/-/issues/19819 + input: { + type: this.type, + }, + }, + }); + if (errors && errors.length) { + this.onError(new Error(errors[0])); + return; + } + this.onSuccess(token); + } catch (e) { + this.onError(e); + } finally { + this.loading = false; + } + }, + onError(error) { + const { message } = error; + createFlash({ message }); + }, + onSuccess(token) { + createFlash({ + message: s__('Runners|New registration token generated!'), + type: FLASH_TYPES.SUCCESS, + }); + this.$emit('tokenReset', token); + }, + }, +}; +</script> +<template> + <gl-button :loading="loading" @click="resetToken"> + {{ __('Reset registration token') }} + </gl-button> +</template> diff --git a/app/assets/javascripts/runner/graphql/runners_registration_token_reset.mutation.graphql b/app/assets/javascripts/runner/graphql/runners_registration_token_reset.mutation.graphql new file mode 100644 index 00000000000..9c2797732ad --- /dev/null +++ b/app/assets/javascripts/runner/graphql/runners_registration_token_reset.mutation.graphql @@ -0,0 +1,6 @@ +mutation runnersRegistrationTokenReset($input: RunnersRegistrationTokenResetInput!) 
{ + runnersRegistrationTokenReset(input: $input) { + token + errors + } +} diff --git a/app/assets/javascripts/runner/runner_list/runner_list_app.vue b/app/assets/javascripts/runner/runner_list/runner_list_app.vue index b4eacb911a2..7f3a980ccca 100644 --- a/app/assets/javascripts/runner/runner_list/runner_list_app.vue +++ b/app/assets/javascripts/runner/runner_list/runner_list_app.vue @@ -7,6 +7,7 @@ import RunnerList from '../components/runner_list.vue'; import RunnerManualSetupHelp from '../components/runner_manual_setup_help.vue'; import RunnerPagination from '../components/runner_pagination.vue'; import RunnerTypeHelp from '../components/runner_type_help.vue'; +import { INSTANCE_TYPE } from '../constants'; import getRunnersQuery from '../graphql/get_runners.query.graphql'; import { fromUrlQueryToSearch, @@ -97,6 +98,7 @@ export default { }); }, }, + INSTANCE_TYPE, }; </script> <template> @@ -106,7 +108,10 @@ export default { <runner-type-help /> </div> <div class="col-sm-6"> - <runner-manual-setup-help :registration-token="registrationToken" /> + <runner-manual-setup-help + :registration-token="registrationToken" + :type="$options.INSTANCE_TYPE" + /> </div> </div> diff --git a/app/assets/javascripts/sentry/index.js b/app/assets/javascripts/sentry/index.js index 06e4e0aa507..a875ef84088 100644 --- a/app/assets/javascripts/sentry/index.js +++ b/app/assets/javascripts/sentry/index.js @@ -1,3 +1,5 @@ +import '../webpack'; + import SentryConfig from './sentry_config'; const index = function index() { diff --git a/app/assets/javascripts/vue_shared/security_reports/components/security_report_download_dropdown.vue b/app/assets/javascripts/vue_shared/security_reports/components/security_report_download_dropdown.vue index 9e941087da2..5d39d740c07 100644 --- a/app/assets/javascripts/vue_shared/security_reports/components/security_report_download_dropdown.vue +++ b/app/assets/javascripts/vue_shared/security_reports/components/security_report_download_dropdown.vue @@ -35,7 
+35,7 @@ export default { <template> <gl-dropdown v-gl-tooltip - :title="s__('SecurityReports|Download results')" + :text="s__('SecurityReports|Download results')" :loading="loading" icon="download" size="small" diff --git a/app/assets/javascripts/webpack.js b/app/assets/javascripts/webpack.js index 4f558843357..b901f17790f 100644 --- a/app/assets/javascripts/webpack.js +++ b/app/assets/javascripts/webpack.js @@ -2,6 +2,9 @@ * This is the first script loaded by webpack's runtime. It is used to manually configure * config.output.publicPath to account for relative_url_root or CDN settings which cannot be * baked-in to our webpack bundles. + * + * Note: This file should be at the top of an entry point and _cannot_ be moved to + * e.g. the `window` scope, because it needs to be executed in the scope of webpack. */ if (gon && gon.webpack_public_path) { diff --git a/app/helpers/issues_helper.rb b/app/helpers/issues_helper.rb index 91920277c50..7690773354f 100644 --- a/app/helpers/issues_helper.rb +++ b/app/helpers/issues_helper.rb @@ -190,7 +190,6 @@ module IssuesHelper email: current_user&.notification_email, emails_help_page_path: help_page_path('development/emails', anchor: 'email-namespace'), empty_state_svg_path: image_path('illustrations/issues.svg'), - endpoint: expose_path(api_v4_projects_issues_path(id: project.id)), export_csv_path: export_csv_project_issues_path(project), has_project_issues: project_issues(project).exists?.to_s, import_csv_issues_path: import_csv_namespace_project_issues_path, diff --git a/app/models/ability.rb b/app/models/ability.rb index c18bd21d754..6a63a8d46ba 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -54,7 +54,7 @@ class Ability end end - def allowed?(user, action, subject = :global, opts = {}) + def allowed?(user, ability, subject = :global, opts = {}) if subject.is_a?(Hash) opts = subject subject = :global @@ -64,21 +64,76 @@ class Ability case opts[:scope] when :user - DeclarativePolicy.user_scope { 
policy.can?(action) } + DeclarativePolicy.user_scope { policy.allowed?(ability) } when :subject - DeclarativePolicy.subject_scope { policy.can?(action) } + DeclarativePolicy.subject_scope { policy.allowed?(ability) } else - policy.can?(action) + policy.allowed?(ability) end + ensure + # TODO: replace with runner invalidation: + # See: https://gitlab.com/gitlab-org/declarative-policy/-/merge_requests/24 + # See: https://gitlab.com/gitlab-org/declarative-policy/-/merge_requests/25 + forget_runner_result(policy.runner(ability)) if policy && ability_forgetting? end def policy_for(user, subject = :global) - cache = Gitlab::SafeRequestStore.active? ? Gitlab::SafeRequestStore : {} - DeclarativePolicy.policy_for(user, subject, cache: cache) + DeclarativePolicy.policy_for(user, subject, cache: ::Gitlab::SafeRequestStore.storage) + end + + # This method is something of a band-aid over the problem. The problem is + # that some conditions may not be re-entrant, if facts change. + # (`BasePolicy#admin?` is a known offender, due to the effects of + # `admin_mode`) + # + # To deal with this we need to clear two elements of state: the offending + # conditions (selected by 'pattern') and the cached ability checks (cached + # on the `policy#runner(ability)`). + # + # Clearing the conditions (see `forget_all_but`) is fairly robust, provided + # the pattern is not _under_-selective. Clearing the runners is harder, + # since there is not good way to know which abilities any given condition + # may affect. The approach taken here (see `forget_runner_result`) is to + # discard all runner results generated during a `forgetting` block. This may + # be _under_-selective if a runner prior to this block cached a state value + # that might now be invalid. + # + # TODO: add some kind of reverse-dependency mapping in DeclarativePolicy + # See: https://gitlab.com/gitlab-org/declarative-policy/-/issues/14 + def forgetting(pattern, &block) + was_forgetting = ability_forgetting? 
+ ::Gitlab::SafeRequestStore[:ability_forgetting] = true + keys_before = ::Gitlab::SafeRequestStore.storage.keys + + yield + ensure + ::Gitlab::SafeRequestStore[:ability_forgetting] = was_forgetting + forget_all_but(keys_before, matching: pattern) end private + def ability_forgetting? + ::Gitlab::SafeRequestStore[:ability_forgetting] + end + + def forget_all_but(keys_before, matching:) + keys_after = ::Gitlab::SafeRequestStore.storage.keys + + added_keys = keys_after - keys_before + added_keys.each do |key| + if key.is_a?(String) && key.start_with?('/dp') && key =~ matching + ::Gitlab::SafeRequestStore.delete(key) + end + end + end + + def forget_runner_result(runner) + # TODO: add support in DP for this + # See: https://gitlab.com/gitlab-org/declarative-policy/-/issues/15 + runner.instance_variable_set(:@state, nil) + end + def apply_filters_if_needed(elements, user, filters) filters.each do |ability, filter| elements = filter.call(elements) unless allowed?(user, ability) diff --git a/app/models/analytics/cycle_analytics/project_level.rb b/app/models/analytics/cycle_analytics/project_level.rb index 7a73bc75ed6..d43793f60c9 100644 --- a/app/models/analytics/cycle_analytics/project_level.rb +++ b/app/models/analytics/cycle_analytics/project_level.rb @@ -47,3 +47,4 @@ module Analytics end end end +Analytics::CycleAnalytics::ProjectLevel.prepend_mod_with('Analytics::CycleAnalytics::ProjectLevel') diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb index ae06bea5a02..159d9d10878 100644 --- a/app/models/ci/pipeline.rb +++ b/app/models/ci/pipeline.rb @@ -1257,7 +1257,7 @@ module Ci end def build_matchers - self.builds.build_matchers(project) + self.builds.latest.build_matchers(project) end private diff --git a/app/models/container_expiration_policy.rb b/app/models/container_expiration_policy.rb index 0441a5f0f5b..9bacd9a0edf 100644 --- a/app/models/container_expiration_policy.rb +++ b/app/models/container_expiration_policy.rb @@ -38,6 +38,16 @@ class 
ContainerExpirationPolicy < ApplicationRecord ) end + def self.without_container_repositories + where.not( + 'EXISTS(?)', + ContainerRepository.select(1) + .where( + 'container_repositories.project_id = container_expiration_policies.project_id' + ) + ) + end + def self.keep_n_options { 1 => _('%{tags} tag per image name') % { tags: 1 }, diff --git a/app/models/integration.rb b/app/models/integration.rb index 238ecbbf209..84f6e61bc48 100644 --- a/app/models/integration.rb +++ b/app/models/integration.rb @@ -44,6 +44,7 @@ class Integration < ApplicationRecord bamboo bugzilla buildkite campfire confluence custom_issue_tracker datadog discord drone_ci + emails_on_push ewm emails_on_push external_wiki ].to_set.freeze def self.renamed?(name) diff --git a/app/models/project.rb b/app/models/project.rb index 735dc185575..951ee70a9a1 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -166,9 +166,9 @@ class Project < ApplicationRecord has_one :datadog_integration, class_name: 'Integrations::Datadog' has_one :discord_integration, class_name: 'Integrations::Discord' has_one :drone_ci_integration, class_name: 'Integrations::DroneCi' - has_one :emails_on_push_service, class_name: 'Integrations::EmailsOnPush' - has_one :ewm_service, class_name: 'Integrations::Ewm' - has_one :external_wiki_service, class_name: 'Integrations::ExternalWiki' + has_one :emails_on_push_integration, class_name: 'Integrations::EmailsOnPush' + has_one :ewm_integration, class_name: 'Integrations::Ewm' + has_one :external_wiki_integration, class_name: 'Integrations::ExternalWiki' has_one :flowdock_service, class_name: 'Integrations::Flowdock' has_one :hangouts_chat_service, class_name: 'Integrations::HangoutsChat' has_one :irker_service, class_name: 'Integrations::Irker' diff --git a/app/models/user.rb b/app/models/user.rb index 8ee0421e45f..5fbd6271589 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -84,10 +84,11 @@ class User < ApplicationRecord update_tracked_fields(request) - 
lease = Gitlab::ExclusiveLease.new("user_update_tracked_fields:#{id}", timeout: 1.hour.to_i) - return unless lease.try_obtain - - Users::UpdateService.new(self, user: self).execute(validate: false) + Gitlab::ExclusiveLease.throttle(id) do + ::Ability.forgetting(/admin/) do + Users::UpdateService.new(self, user: self).execute(validate: false) + end + end end # rubocop: enable CodeReuse/ServiceClass @@ -1868,6 +1869,12 @@ class User < ApplicationRecord !!(password_expires_at && password_expires_at < Time.current) end + def password_expired_if_applicable? + return false unless allow_password_authentication? + + password_expired? + end + def can_be_deactivated? active? && no_recent_activity? && !internal? end diff --git a/app/policies/base_policy.rb b/app/policies/base_policy.rb index ea1ea87ff2f..77897c5807f 100644 --- a/app/policies/base_policy.rb +++ b/app/policies/base_policy.rb @@ -67,7 +67,7 @@ class BasePolicy < DeclarativePolicy::Base rule { default }.enable :read_cross_project - condition(:is_gitlab_com) { ::Gitlab.dev_env_or_com? } + condition(:is_gitlab_com, score: 0, scope: :global) { ::Gitlab.dev_env_or_com? } end BasePolicy.prepend_mod_with('BasePolicy') diff --git a/app/policies/concerns/policy_actor.rb b/app/policies/concerns/policy_actor.rb index 513bb85f538..8fa09683b06 100644 --- a/app/policies/concerns/policy_actor.rb +++ b/app/policies/concerns/policy_actor.rb @@ -85,7 +85,7 @@ module PolicyActor false end - def password_expired? + def password_expired_if_applicable? false end diff --git a/app/policies/global_policy.rb b/app/policies/global_policy.rb index 35d38bac7fa..c3b4b163cb4 100644 --- a/app/policies/global_policy.rb +++ b/app/policies/global_policy.rb @@ -16,7 +16,7 @@ class GlobalPolicy < BasePolicy end condition(:password_expired, scope: :user) do - @user&.password_expired? + @user&.password_expired_if_applicable? end condition(:project_bot, scope: :user) { @user&.project_bot? 
} diff --git a/app/services/users/update_service.rb b/app/services/users/update_service.rb index ff08c806319..23c67231a29 100644 --- a/app/services/users/update_service.rb +++ b/app/services/users/update_service.rb @@ -17,6 +17,7 @@ module Users yield(@user) if block_given? user_exists = @user.persisted? + @user.user_detail # prevent assignment discard_read_only_attributes assign_attributes diff --git a/app/views/admin/runners/show.html.haml b/app/views/admin/runners/show.html.haml index d03a782756b..6f3c16f7abf 100644 --- a/app/views/admin/runners/show.html.haml +++ b/app/views/admin/runners/show.html.haml @@ -28,12 +28,14 @@ %tr %td .gl-alert.gl-alert-danger - = sprite_icon('error', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title') - .gl-alert-body - %strong - = project.full_name - .gl-alert-actions - = link_to s_('Disable'), admin_namespace_project_runner_project_path(project.namespace, project, runner_project), method: :delete, class: 'btn gl-alert-action btn-info btn-md gl-button' + .gl-alert-container + = sprite_icon('error', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title') + .gl-alert-content + .gl-alert-body + %strong + = project.full_name + .gl-alert-actions + = link_to s_('Disable'), admin_namespace_project_runner_project_path(project.namespace, project, runner_project), method: :delete, class: 'btn gl-alert-action btn-confirm btn-md gl-button' %table.table{ data: { testid: 'unassigned-projects' } } %thead diff --git a/app/views/clusters/clusters/_gcp_signup_offer_banner.html.haml b/app/views/clusters/clusters/_gcp_signup_offer_banner.html.haml index 5df368ef3af..81f4be9fce5 100644 --- a/app/views/clusters/clusters/_gcp_signup_offer_banner.html.haml +++ b/app/views/clusters/clusters/_gcp_signup_offer_banner.html.haml @@ -1,9 +1,11 @@ - link = link_to(s_('ClusterIntegration|sign up'), 'https://console.cloud.google.com/freetrial?utm_campaign=2018_cpanel&utm_source=gitlab&utm_medium=referral', target: '_blank', rel: 
'noopener noreferrer') .gcp-signup-offer.gl-alert.gl-alert-info.gl-my-3{ role: 'alert', data: { feature_id: UserCalloutsHelper::GCP_SIGNUP_OFFER, dismiss_endpoint: user_callouts_path } } - %button.js-close.gl-alert-dismiss{ type: 'button', 'aria-label' => _('Dismiss') } - = sprite_icon('close', size: 16, css_class: 'gl-icon') - = sprite_icon('information-o', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title') - %h4.gl-alert-title= s_('ClusterIntegration|Did you know?') - %p.gl-alert-body= s_('ClusterIntegration|Every new Google Cloud Platform (GCP) account receives $300 in credit upon %{sign_up_link}. In partnership with Google, GitLab is able to offer an additional $200 for both new and existing GCP accounts to get started with GitLab\'s Google Kubernetes Engine Integration.').html_safe % { sign_up_link: link } - %a.gl-button.btn-confirm.text-decoration-none{ href: 'https://cloud.google.com/partners/partnercredit/?pcn_code=0014M00001h35gDQAQ#contact-form', target: '_blank', rel: 'noopener noreferrer' } - = s_("ClusterIntegration|Apply for credit") + .gl-alert-container + %button.js-close.btn.gl-dismiss-btn.btn-default.btn-sm.gl-button.btn-default-tertiary.btn-icon{ type: 'button', 'aria-label' => _('Dismiss') } + = sprite_icon('close', size: 16, css_class: 'gl-icon') + = sprite_icon('information-o', size: 16, css_class: 'gl-icon gl-alert-icon gl-alert-icon-no-title') + .gl-alert-content + %h4.gl-alert-title= s_('ClusterIntegration|Did you know?') + %p.gl-alert-body= s_('ClusterIntegration|Every new Google Cloud Platform (GCP) account receives $300 in credit upon %{sign_up_link}. 
In partnership with Google, GitLab is able to offer an additional $200 for both new and existing GCP accounts to get started with GitLab\'s Google Kubernetes Engine Integration.').html_safe % { sign_up_link: link } + %a.gl-button.btn-confirm.text-decoration-none{ href: 'https://cloud.google.com/partners/partnercredit/?pcn_code=0014M00001h35gDQAQ#contact-form', target: '_blank', rel: 'noopener noreferrer' } + = s_("ClusterIntegration|Apply for credit") diff --git a/app/workers/container_expiration_policy_worker.rb b/app/workers/container_expiration_policy_worker.rb index b15d1bf90bd..8fc139ac87c 100644 --- a/app/workers/container_expiration_policy_worker.rb +++ b/app/workers/container_expiration_policy_worker.rb @@ -15,11 +15,19 @@ class ContainerExpirationPolicyWorker # rubocop:disable Scalability/IdempotentWo def perform process_stale_ongoing_cleanups + disable_policies_without_container_repositories throttling_enabled? ? perform_throttled : perform_unthrottled end private + def disable_policies_without_container_repositories + ContainerExpirationPolicy.active.each_batch(of: BATCH_SIZE) do |policies| + policies.without_container_repositories + .update_all(enabled: false) + end + end + def process_stale_ongoing_cleanups threshold = delete_tags_service_timeout.seconds + 30.minutes ContainerRepository.with_stale_ongoing_cleanup(threshold.ago) diff --git a/app/workers/web_hook_worker.rb b/app/workers/web_hook_worker.rb index 3480f49d640..a2a53ca922a 100644 --- a/app/workers/web_hook_worker.rb +++ b/app/workers/web_hook_worker.rb @@ -8,7 +8,7 @@ class WebHookWorker feature_category :integrations worker_has_external_dependencies! 
loggable_arguments 2 - data_consistency :delayed, feature_flag: :load_balancing_for_web_hook_worker + data_consistency :delayed sidekiq_options retry: 4, dead: false diff --git a/config/feature_flags/development/load_balancing_for_web_hook_worker.yml b/config/feature_flags/development/load_balancing_for_web_hook_worker.yml deleted file mode 100644 index f9c191e3ab4..00000000000 --- a/config/feature_flags/development/load_balancing_for_web_hook_worker.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: load_balancing_for_web_hook_worker -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/62075 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/331365 -milestone: '14.0' -type: development -group: group::memory -default_enabled: false diff --git a/config/initializers/active_record_keyset_pagination.rb b/config/initializers/active_record_keyset_pagination.rb index f8c2ada5255..f5692c95276 100644 --- a/config/initializers/active_record_keyset_pagination.rb +++ b/config/initializers/active_record_keyset_pagination.rb @@ -2,8 +2,8 @@ module PaginatorExtension # This method loads the records for the requested page and returns a keyset paginator object. 
- def keyset_paginate(cursor: nil, per_page: 20) - Gitlab::Pagination::Keyset::Paginator.new(scope: self.dup, cursor: cursor, per_page: per_page) + def keyset_paginate(cursor: nil, per_page: 20, keyset_order_options: {}) + Gitlab::Pagination::Keyset::Paginator.new(scope: self.dup, cursor: cursor, per_page: per_page, keyset_order_options: keyset_order_options) end end diff --git a/config/initializers/declarative_policy.rb b/config/initializers/declarative_policy.rb index 49267584809..406f88bac23 100644 --- a/config/initializers/declarative_policy.rb +++ b/config/initializers/declarative_policy.rb @@ -1,5 +1,7 @@ # frozen_string_literal: true +require 'declarative_policy' + DeclarativePolicy.configure do named_policy :global, ::GlobalPolicy end diff --git a/config/webpack.config.js b/config/webpack.config.js index db5371a7258..c2af7197f94 100644 --- a/config/webpack.config.js +++ b/config/webpack.config.js @@ -103,6 +103,15 @@ function generateEntries() { autoEntries[entry] = defaultEntries.concat(entryPaths); }); + /* + If you create manual entries, ensure that these import `app/assets/javascripts/webpack.js` right at + the top of the entry in order to ensure that the public path is correctly determined for loading + assets async. See: https://webpack.js.org/configuration/output/#outputpublicpath + + Note: WebPack 5 has an 'auto' option for the public path which could allow us to remove this option + Note 2: If you are using web-workers, you might need to reset the public path, see: + https://gitlab.com/gitlab-org/gitlab/-/issues/321656 + */ const manualEntries = { default: defaultEntries, sentry: './sentry/index.js', diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index 0069d109e1e..e84769fa568 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -14459,9 +14459,9 @@ List limit metric setting. 
| Value | Description | | ----- | ----------- | -| <a id="listlimitmetricall_metrics"></a>`all_metrics` | | -| <a id="listlimitmetricissue_count"></a>`issue_count` | | -| <a id="listlimitmetricissue_weights"></a>`issue_weights` | | +| <a id="listlimitmetricall_metrics"></a>`all_metrics` | Limit list by number and total weight of issues. | +| <a id="listlimitmetricissue_count"></a>`issue_count` | Limit list by number of issues. | +| <a id="listlimitmetricissue_weights"></a>`issue_weights` | Limit list by total weight of issues. | ### `MeasurementIdentifier` diff --git a/doc/ci/runners/README.md b/doc/ci/runners/README.md index b493da993ca..45113c480c7 100644 --- a/doc/ci/runners/README.md +++ b/doc/ci/runners/README.md @@ -21,7 +21,7 @@ which depends on your [subscription plan](../../subscriptions/gitlab_com/index.m Linux shared runners on GitLab.com run in autoscale mode and are powered by Google Cloud Platform. -Autoscaling means reduced queue times to spin up CI/CD jobs, and isolated VMs for each project, thus maximizing security. These shared runners are available for users and customers on GitLab.com. +Autoscaling means reduced queue times to spin up CI/CD jobs, and isolated VMs for each job, thus maximizing security. These shared runners are available for users and customers on GitLab.com. GitLab offers Ultimate tier capabilities and included CI/CD minutes per group per month for our [Open Source](https://about.gitlab.com/solutions/open-source/join/), [Education](https://about.gitlab.com/solutions/education/), and [Startups](https://about.gitlab.com/solutions/startups/) programs. For private projects, GitLab offers various [plans](https://about.gitlab.com/pricing/), starting with a Free tier. 
diff --git a/doc/development/database/database_reviewer_guidelines.md b/doc/development/database/database_reviewer_guidelines.md index de131ddffbc..16734dada13 100644 --- a/doc/development/database/database_reviewer_guidelines.md +++ b/doc/development/database/database_reviewer_guidelines.md @@ -52,7 +52,7 @@ that require a more in-depth discussion between the database reviewers and maint - [Database Office Hours Agenda](https://docs.google.com/document/d/1wgfmVL30F8SdMg-9yY6Y8djPSxWNvKmhR5XmsvYX1EI/edit). - <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [YouTube playlist with past recordings](https://www.youtube.com/playlist?list=PL05JrBw4t0Kp-kqXeiF7fF7cFYaKtdqXM). -You should also join the [#database-labs](../understanding_explain_plans.md#database-lab) +You should also join the [#database-lab](../understanding_explain_plans.md#database-lab-engine) Slack channel and get familiar with how to use Joe, the Slackbot that provides developers with their own clone of the production database. diff --git a/doc/development/database/keyset_pagination.md b/doc/development/database/keyset_pagination.md new file mode 100644 index 00000000000..e30c3cc8832 --- /dev/null +++ b/doc/development/database/keyset_pagination.md @@ -0,0 +1,251 @@ +--- +stage: Enablement +group: Database +info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments +--- + +# Keyset pagination + +The keyset pagination library can be used in HAML-based views and the REST API within the GitLab project. + +You can read about keyset pagination and how it compares to the offset based pagination on our [pagination guidelines](pagination_guidelines.md) page. 
+ +## API overview + +### Synopsis + +Keyset pagination with `ActiveRecord` in Rails controllers: + +```ruby +cursor = params[:cursor] # this is nil when the first page is requested +paginator = Project.order(:created_at).keyset_paginate(cursor: cursor, per_page: 20) + +paginator.each do |project| + puts project.name # prints maximum 20 projects +end +``` + +### Usage + +This library adds a single method to ActiveRecord relations: [`#keyset_paginate`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/config/initializers/active_record_keyset_pagination.rb). + +This is similar in spirit (but not in implementation) to Kaminari's `paginate` method. + +Keyset pagination works without any configuration for simple ActiveRecord queries: + +- Order by one column. +- Order by two columns, where the last column is the primary key. + +The library can detect nullable and non-distinct columns and based on these, it will add extra ordering using the primary key. This is necessary because keyset pagination expects distinct order by values: + +```ruby +Project.order(:created_at).keyset_paginate.records # ORDER BY created_at, id + +Project.order(:name).keyset_paginate.records # ORDER BY name, id + +Project.order(:created_at, id: :desc).keyset_paginate.records # ORDER BY created_at, id + +Project.order(created_at: :asc, id: :desc).keyset_paginate.records # ORDER BY created_at, id DESC +``` + +The `keyset_paginate` method returns [a special paginator object](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/pagination/keyset/paginator.rb) which contains the loaded records and additional information for requesting various pages. + +The method accepts the following keyword arguments: + +- `cursor` - Encoded order by column values for requesting the next page (can be `nil`). +- `per_page` - Number of records to load per page (default 20). 
+- `keyset_order_options` - Extra options for building the keyset paginated database query, see an example for `UNION` queries in the performance section (optional). + +The paginator object has the following methods: + +- `records` - Returns the records for the current page. +- `has_next_page?` - Tells whether there is a next page. +- `has_previous_page?` - Tells whether there is a previous page. +- `cursor_for_next_page` - Encoded values as `String` for requesting the next page (can be `nil`). +- `cursor_for_previous_page` - Encoded values as `String` for requesting the previous page (can be `nil`). +- `cursor_for_first_page` - Encoded values as `String` for requesting the first page. +- `cursor_for_last_page` - Encoded values as `String` for requesting the last page. +- The paginator object includes the `Enumerable` module and delegates the enumerable functionality to the `records` method/array. + +Example for getting the first and the second page: + +```ruby +paginator = Project.order(:name).keyset_paginate + +paginator.to_a # same as .records + +cursor = paginator.cursor_for_next_page # encoded column attributes for the next page + +paginator = Project.order(:name).keyset_paginate(cursor: cursor).records # loading the next page +``` + +Since keyset pagination does not support page numbers, we are restricted to go to the following pages: + +- Next page +- Previous page +- Last page +- First page + +#### Usage in Rails with HAML views + +Consider the following controller action, where we list the projects ordered by name: + +```ruby +def index + @projects = Project.order(:name).keyset_paginate(cursor: params[:cursor]) +end +``` + +In the HAML file, we can render the records: + +```ruby +- if @projects.any?
+ - @projects.each do |project| + .project-container + = project.name + + = keyset_paginate @projects +``` + +## Performance + +The performance of the keyset pagination depends on the database index configuration and the number of columns we use in the `ORDER BY` clause. + +In case we order by the primary key (`id`), then the generated queries will be efficient since the primary key is covered by a database index. + +When two or more columns are used in the `ORDER BY` clause, it's advised to check the generated database query and make sure that the correct index configuration is used. More information can be found on the [pagination guideline page](pagination_guidelines.md#index-coverage). + +NOTE: +While the query performance of the first page might look good, the second page (where the cursor attributes are used in the query) might yield poor performance. It's advised to always verify the performance of both queries: first page and second page. + +Example database query with tie-breaker (`id`) column: + +```sql +SELECT "issues".* +FROM "issues" +WHERE (("issues"."id" > 99 + AND "issues"."created_at" = '2021-02-16 11:26:17.408466') + OR ("issues"."created_at" > '2021-02-16 11:26:17.408466') + OR ("issues"."created_at" IS NULL)) +ORDER BY "issues"."created_at" DESC NULLS LAST, "issues"."id" DESC +LIMIT 20 +``` + +`OR` queries are difficult to optimize in PostgreSQL, we generally advise using [`UNION` queries](../sql.md#use-unions) instead. The keyset pagination library can generate efficient `UNION` when multiple columns are present in the `ORDER BY` clause. This is triggered when we specify the `use_union_optimization: true` option in the options passed to `Relation#keyset_paginate`. + +Example: + +```ruby +# Triggers a simple query for the first page. 
+paginator1 = Project.order(:created_at, id: :desc).keyset_paginate(per_page: 2, keyset_order_options: { use_union_optimization: true }) + +cursor = paginator1.cursor_for_next_page + +# Triggers UNION query for the second page +paginator2 = Project.order(:created_at, id: :desc).keyset_paginate(per_page: 2, cursor: cursor, keyset_order_options: { use_union_optimization: true }) + +puts paginator2.records.to_a # UNION query +``` + +## Complex order configuration + +Common `ORDER BY` configurations will be handled by the `keyset_paginate` method automatically so no manual configuration is needed. There are a few edge cases where order object configuration is necessary: + +- `NULLS LAST` ordering. +- Function-based ordering. +- Ordering with a custom tie-breaker column, like `iid`. + +These order objects can be defined in the model classes as normal ActiveRecord scopes, there is no special behavior that prevents using these scopes elsewhere (kaminari, background jobs). + +### `NULLS LAST` ordering + +Consider the following scope: + +```ruby +scope = Issue.where(project_id: 10).order(Gitlab::Database.nulls_last_order('relative_position', 'DESC')) +# SELECT "issues".* FROM "issues" WHERE "issues"."project_id" = 10 ORDER BY relative_position DESC NULLS LAST + +scope.keyset_paginate # raises: Gitlab::Pagination::Keyset::Paginator::UnsupportedScopeOrder: The order on the scope does not support keyset pagination +``` + +The `keyset_paginate` method raises an error because the order value on the query is a custom SQL string and not an [`Arel`](https://www.rubydoc.info/gems/arel) AST node. The keyset library cannot automatically infer configuration values from these kinds of queries. + +To make keyset pagination work, we need to configure custom order objects, to do so, we need to collect information about the order columns: + +- `relative_position` can have duplicated values since no unique index is present. 
+- `relative_position` can have null values because we don't have a not null constraint on the column. For this, we need to determine where will we see NULL values, at the beginning of the resultset or the end (`NULLS LAST`). +- Keyset pagination requires distinct order columns, so we'll need to add the primary key (`id`) to make the order distinct. +- Jumping to the last page and paginating backwards actually reverses the `ORDER BY` clause. For this, we'll need to provide the reversed `ORDER BY` clause. + +Example: + +```ruby +order = Gitlab::Pagination::Keyset::Order.build([ + # The attributes are documented in the `lib/gitlab/pagination/keyset/column_order_definition.rb` file + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'relative_position', + column_expression: Issue.arel_table[:relative_position], + order_expression: Gitlab::Database.nulls_last_order('relative_position', 'DESC'), + reversed_order_expression: Gitlab::Database.nulls_first_order('relative_position', 'ASC'), + nullable: :nulls_last, + order_direction: :desc, + distinct: false + ), + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'id', + order_expression: Issue.arel_table[:id].asc, + nullable: :not_nullable, + distinct: true + ) +]) + +scope = Issue.where(project_id: 10).order(order) # or reorder() + +scope.keyset_paginate.records # works +``` + +### Function-based ordering + +In the following example, we multiply the `id` by 10 and ordering by that value. 
Since the `id` column is unique, we need to define only one column: + +```ruby +order = Gitlab::Pagination::Keyset::Order.build([ + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'id_times_ten', + order_expression: Arel.sql('id * 10').asc, + nullable: :not_nullable, + order_direction: :asc, + distinct: true, + add_to_projections: true + ) +]) + +paginator = Issue.where(project_id: 10).order(order).keyset_paginate(per_page: 5) +puts paginator.records.map(&:id_times_ten) + +cursor = paginator.cursor_for_next_page + +paginator = Issue.where(project_id: 10).order(order).keyset_paginate(cursor: cursor, per_page: 5) +puts paginator.records.map(&:id_times_ten) +``` + +The `add_to_projections` flag tells the paginator to expose the column expression in the `SELECT` clause. This is necessary because the keyset pagination needs to somehow extract the last value from the records to request the next page. + +### `iid` based ordering + +When ordering issues, the database ensures that we'll have distinct `iid` values within a project. 
Ordering by one column is enough to make the pagination work if the `project_id` filter is present: + +```ruby +order = Gitlab::Pagination::Keyset::Order.build([ + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'iid', + order_expression: Issue.arel_table[:iid].asc, + nullable: :not_nullable, + distinct: true + ) +]) + +scope = Issue.where(project_id: 10).order(order) + +scope.keyset_paginate.records # works +``` diff --git a/doc/development/database/pagination_guidelines.md b/doc/development/database/pagination_guidelines.md index 3308ebfcaae..ce656851f86 100644 --- a/doc/development/database/pagination_guidelines.md +++ b/doc/development/database/pagination_guidelines.md @@ -58,9 +58,7 @@ It's not possible to make all filter and sort combinations performant, so we sho ### Prepare for scaling -Offset-based pagination is the easiest way to paginate over records, however, it does not scale well for large tables. As a long-term solution, keyset pagination is preferred. The tooling around keyset pagination is not as mature as for offset pagination so currently, it's easier to start with offset pagination and then switch to keyset pagination. - -To avoid losing functionality and maintaining backward compatibility when switching pagination methods, it's advised to consider the following approach in the design phase: +Offset-based pagination is the easiest way to paginate over records, however, it does not scale well for large database tables. As a long-term solution, [keyset pagination](keyset_pagination.md) is preferred. Switching between offset and keyset pagination is generally straightforward and can be done without affecting the end-user if the following conditions are met: - Avoid presenting total counts, prefer limit counts. - Example: count maximum 1001 records, and then on the UI show 1000+ if the count is 1001, show the actual number otherwise. 
@@ -304,7 +302,22 @@ LIMIT 20
 
 ##### Tooling
 
-Using keyset pagination outside of GraphQL is not straightforward. We have the low-level blocks for building keyset pagination database queries, however, the usage in application code is still not streamlined yet.
+A generic keyset pagination library is available within the GitLab project which can, in most cases, easily replace the existing Kaminari-based pagination with significant performance improvements when dealing with large datasets.
+
+Example:
+
+```ruby
+# first page
+paginator = Project.order(:created_at, :id).keyset_paginate(per_page: 20)
+puts paginator.to_a # records
+
+# next page
+cursor = paginator.cursor_for_next_page
+paginator = Project.order(:created_at, :id).keyset_paginate(cursor: cursor, per_page: 20)
+puts paginator.to_a # records
+```
+
+For a comprehensive overview, take a look at the [keyset pagination guide](keyset_pagination.md) page.
 
 #### Performance
 
diff --git a/doc/development/query_performance.md b/doc/development/query_performance.md
index 87e26cf42df..3ff36c7d005 100644
--- a/doc/development/query_performance.md
+++ b/doc/development/query_performance.md
@@ -38,8 +38,8 @@ cache, or what PostgreSQL calls shared buffers. This is the "warm cache" query.
 When analyzing an [`EXPLAIN` plan](understanding_explain_plans.md), you can see
 the difference not only in the timing, but by looking at the output for `Buffers`
-by running your explain with `EXPLAIN(analyze, buffers)`. The [#database-lab](understanding_explain_plans.md#database-lab)
-tool will automatically include these options.
+by running your explain with `EXPLAIN(analyze, buffers)`. [Database Lab](understanding_explain_plans.md#database-lab-engine)
+will automatically include these options.
 
 If you are making a warm cache query, you will only see the `shared hits`.
diff --git a/doc/development/understanding_explain_plans.md b/doc/development/understanding_explain_plans.md
index 66dc1fef31a..f9d1e7e2eee 100644
--- a/doc/development/understanding_explain_plans.md
+++ b/doc/development/understanding_explain_plans.md
@@ -198,13 +198,39 @@ Here we can see that our filter has to remove 65,677 rows, and that we use
 208,846 buffers. Each buffer in PostgreSQL is 8 KB (8192 bytes), meaning our
 above node uses *1.6 GB of buffers*. That's a lot!
 
+Keep in mind that some statistics are per-loop averages, while others are total values:
+
+| Field name | Value type |
+| --- | --- |
+| Actual Total Time | per-loop average |
+| Actual Rows | per-loop average |
+| Buffers Shared Hit | total value |
+| Buffers Shared Read | total value |
+| Buffers Shared Dirtied | total value |
+| Buffers Shared Written | total value |
+| I/O Read Time | total value |
+| I/O Write Time | total value |
+
+For example:
+
+```sql
+ ->  Index Scan using users_pkey on public.users  (cost=0.43..3.44 rows=1 width=1318) (actual time=0.025..0.025 rows=1 loops=888)
+     Index Cond: (users.id = issues.author_id)
+     Buffers: shared hit=3543 read=9
+     I/O Timings: read=17.760 write=0.000
+```
+
+Here we can see that this node used 3552 buffers (3543 + 9), returned 888 rows (`888 * 1`), and the actual duration was 22.2 milliseconds (`888 * 0.025`).
+17.76 milliseconds of the total duration was spent in reading from disk, to retrieve data that was not in the cache.
+
 ## Node types
 
 There are quite a few different types of nodes, so we only cover some of the
 more common ones here.
 
 A full list of all the available nodes and their descriptions can be found in
-the [PostgreSQL source file `plannodes.h`](https://gitlab.com/postgres/postgres/blob/master/src/include/nodes/plannodes.h)
+the [PostgreSQL source file `plannodes.h`](https://gitlab.com/postgres/postgres/blob/master/src/include/nodes/plannodes.h).
+pgMustard's [EXPLAIN docs](https://www.pgmustard.com/docs/explain) also offer a detailed look into nodes and their fields.
 
 ### Seq Scan
 
@@ -441,7 +467,7 @@ When optimizing a query, we usually need to reduce the amount of data we're
 dealing with. Indexes are the way to work with fewer pages (buffers) to get the
 result, so, during optimization, look at the number of buffers used (read and hit),
 and work on reducing these numbers. Reduced timing will be the consequence of reduced
-buffer numbers. [#database-lab](#database-lab) guarantees that the plan is structurally
+buffer numbers. [Database Lab Engine](#database-lab-engine) guarantees that the plan is structurally
 identical to production (and overall number of buffers is the same as on production),
 but difference in cache state and I/O speed may lead to different timings.
 
@@ -617,7 +643,7 @@ If we look at the plan we also see our costs are very low:
 Index Scan using projects_pkey on projects  (cost=0.43..3.45 rows=1 width=4) (actual time=0.049..0.050 rows=1 loops=145)
 ```
 
-Here our cost is only 3.45, and it only takes us 0.050 milliseconds to do so.
+Here our cost is only 3.45, and it takes us 7.25 milliseconds to do so (0.05 * 145).
 The next index scan is a bit more expensive:
 
 ```sql
@@ -681,64 +707,26 @@ There are a few ways to get the output of a query plan. Of course you can
 directly run the `EXPLAIN` query in the `psql` console, or you can follow one of
 the other options below.
 
-### Rails console
+### Database Lab Engine
 
-Using the [`activerecord-explain-analyze`](https://github.com/6/activerecord-explain-analyze)
-you can directly generate the query plan from the Rails console:
+GitLab team members can use [Database Lab Engine](https://gitlab.com/postgres-ai/database-lab), and the companion
+SQL optimization tool - [Joe Bot](https://gitlab.com/postgres-ai/joe).
-```ruby
-pry(main)> require 'activerecord-explain-analyze'
-=> true
-pry(main)> Project.where('build_timeout > ?', 3600).explain(analyze: true)
-  Project Load (1.9ms)  SELECT "projects".* FROM "projects" WHERE (build_timeout > 3600)
-  ↳ (pry):12
-=> EXPLAIN for: SELECT "projects".* FROM "projects" WHERE (build_timeout > 3600)
-Seq Scan on public.projects  (cost=0.00..2.17 rows=1 width=742) (actual time=0.040..0.041 rows=0 loops=1)
-  Output: id, name, path, description, created_at, updated_at, creator_id, namespace_id, ...
-  Filter: (projects.build_timeout > 3600)
-  Rows Removed by Filter: 14
-  Buffers: shared hit=2
-Planning time: 0.411 ms
-Execution time: 0.113 ms
-```
+Database Lab Engine provides developers with their own clone of the production database, while Joe Bot helps with exploring execution plans.
 
-### ChatOps
+Joe Bot is available in the [`#database-lab`](https://gitlab.slack.com/archives/CLJMDRD8C) channel on Slack,
-[GitLab team members can also use our ChatOps solution, available in Slack using the
-`/chatops` slash command](chatops_on_gitlabcom.md).
+and through its [web interface](https://console.postgres.ai/gitlab/joe-instances).
-You can use ChatOps to get a query plan by running the following:
+With Joe Bot you can execute DDL statements (like creating indexes, tables, and columns) and get query plans for `SELECT`, `UPDATE`, and `DELETE` statements.
 
-```sql
-/chatops run explain SELECT COUNT(*) FROM projects WHERE visibility_level IN (0, 20)
-```
+For example, in order to test a new index on a column that does not exist on production yet, you can do the following:
 
-Visualising the plan using <https://explain.depesz.com/> is also supported:
+Create the column:
 
 ```sql
-/chatops run explain --visual SELECT COUNT(*) FROM projects WHERE visibility_level IN (0, 20)
+exec ALTER TABLE projects ADD COLUMN last_at timestamp without time zone
 ```
 
-Quoting the query is not necessary.
- -For more information about the available options, run: - -```sql -/chatops run explain --help -``` - -### `#database-lab` - -Another tool GitLab team members can use is a chatbot powered by [Joe](https://gitlab.com/postgres-ai/joe) -which uses [Database Lab](https://gitlab.com/postgres-ai/database-lab) to instantly provide developers -with their own clone of the production database. - -Joe is available in the -[`#database-lab`](https://gitlab.slack.com/archives/CLJMDRD8C) channel on Slack. - -Unlike ChatOps, it gives you a way to execute DDL statements (like creating indexes and tables) and get query plan not only for `SELECT` but also `UPDATE` and `DELETE`. - -For example, in order to test new index you can do the following: - Create the index: ```sql @@ -769,18 +757,67 @@ For more information about the available options, run: help ``` +The web interface comes with the following execution plan visualizers included: + +- [Depesz](https://explain.depesz.com/) +- [PEV2](https://github.com/dalibo/pev2) +- [FlameGraph](https://github.com/mgartner/pg_flame) + #### Tips & Tricks -The database connection is now maintained during your whole session, so you can use `exec set ...` for any session variables (such as `enable_seqscan` or `work_mem`). These settings will be applied to all subsequent commands until you reset them. +The database connection is now maintained during your whole session, so you can use `exec set ...` for any session variables (such as `enable_seqscan` or `work_mem`). These settings will be applied to all subsequent commands until you reset them. 
For example you can disable parallel queries with + +```sql +exec SET max_parallel_workers_per_gather = 0 +``` + +### Rails console + +Using the [`activerecord-explain-analyze`](https://github.com/6/activerecord-explain-analyze) +you can directly generate the query plan from the Rails console: + +```ruby +pry(main)> require 'activerecord-explain-analyze' +=> true +pry(main)> Project.where('build_timeout > ?', 3600).explain(analyze: true) + Project Load (1.9ms) SELECT "projects".* FROM "projects" WHERE (build_timeout > 3600) + ↳ (pry):12 +=> EXPLAIN for: SELECT "projects".* FROM "projects" WHERE (build_timeout > 3600) +Seq Scan on public.projects (cost=0.00..2.17 rows=1 width=742) (actual time=0.040..0.041 rows=0 loops=1) + Output: id, name, path, description, created_at, updated_at, creator_id, namespace_id, ... + Filter: (projects.build_timeout > 3600) + Rows Removed by Filter: 14 + Buffers: shared hit=2 +Planning time: 0.411 ms +Execution time: 0.113 ms +``` + +### ChatOps + +[GitLab team members can also use our ChatOps solution, available in Slack using the +`/chatops` slash command](chatops_on_gitlabcom.md). + +NOTE: +While ChatOps is still available, the recommended way to generate execution plans is to use [Database Lab Engine](#database-lab-engine). -It is also possible to use transactions. This may be useful when you are working on statements that modify the data, for example INSERT, UPDATE, and DELETE. The `explain` command will perform `EXPLAIN ANALYZE`, which executes the statement. 
In order to run each `explain` starting from a clean state you can wrap it in a transaction, for example: +You can use ChatOps to get a query plan by running the following: ```sql -exec BEGIN +/chatops run explain SELECT COUNT(*) FROM projects WHERE visibility_level IN (0, 20) +``` -explain UPDATE some_table SET some_column = TRUE +Visualising the plan using <https://explain.depesz.com/> is also supported: + +```sql +/chatops run explain --visual SELECT COUNT(*) FROM projects WHERE visibility_level IN (0, 20) +``` -exec ROLLBACK +Quoting the query is not necessary. + +For more information about the available options, run: + +```sql +/chatops run explain --help ``` ## Further reading diff --git a/doc/development/usage_ping/index.md b/doc/development/usage_ping/index.md index 95dc4f2979a..de6a234e20c 100644 --- a/doc/development/usage_ping/index.md +++ b/doc/development/usage_ping/index.md @@ -24,11 +24,32 @@ More links: ## What is Usage Ping? -- GitLab sends a weekly payload containing usage data to GitLab Inc. Usage Ping provides high-level data to help our product, support, and sales teams. It does not send any project names, usernames, or any other specific data. The information from the usage ping is not anonymous, it is linked to the hostname of the instance. Sending usage ping is optional, and any instance can disable analytics. -- The usage data is primarily composed of row counts for different tables in the instance's database. By comparing these counts month over month (or week over week), we can get a rough sense for how an instance is using the different features in the product. In addition to counts, other facts - that help us classify and understand GitLab installations are collected. -- Usage ping is important to GitLab as we use it to calculate our Stage Monthly Active Users (SMAU) which helps us measure the success of our stages and features. 
-- While usage ping is enabled, GitLab gathers data from the other instances and can show usage statistics of your instance to your users. +Usage Ping is a process in GitLab that collects and sends a weekly payload to GitLab Inc. +The payload provides important high-level data that helps our product, support, +and sales teams understand how GitLab is used. For example, the data helps to: + +- Compare counts month over month (or week over week) to get a rough sense for how an instance uses + different product features. +- Collect other facts that help us classify and understand GitLab installations. +- Calculate our Stage Monthly Active Users (SMAU), which helps to measure the success of our stages + and features. + +Usage Ping information is not anonymous. It's linked to the instance's hostname. However, it does +not contain project names, usernames, or any other specific data. + +Sending a Usage Ping payload is optional and can be [disabled](#disable-usage-ping) on any instance. +When Usage Ping is enabled, GitLab gathers data from the other instances +and can show your instance's usage statistics to your users. + +### Terminology + +We use the following terminology to describe the Usage Ping components: + +- **Usage Ping**: the process that collects and generates a JSON payload. +- **Usage data**: the contents of the Usage Ping JSON payload. This includes metrics. +- **Metrics**: primarily made up of row counts for different tables in an instance's database. Each + metric has a corresponding [metric definition](metrics_dictionary.md#metrics-definition-and-validation) + in a YAML file. ### Why should we enable Usage Ping? diff --git a/doc/user/group/epics/epic_boards.md b/doc/user/group/epics/epic_boards.md index 2f9dc27d87f..c31b0c7f78a 100644 --- a/doc/user/group/epics/epic_boards.md +++ b/doc/user/group/epics/epic_boards.md @@ -25,7 +25,7 @@ To view an epic board, in a group, select **Epics > Boards**. 
Prerequisites: -- A minimum of [Reporter](../../permissions.md#group-members-permissions) access to a group in GitLab. +- You must have at least the [Reporter role](../../permissions.md#group-members-permissions) for a group. To create a new epic board: @@ -49,7 +49,7 @@ To change these options later, [edit the board](#edit-the-scope-of-an-epic-board Prerequisites: -- A minimum of [Reporter](../../permissions.md#group-members-permissions) access to a group in GitLab. +- You must have at least the [Reporter role](../../permissions.md#group-members-permissions) for a group. - A minimum of two boards present in a group. To delete the active epic board: @@ -73,7 +73,7 @@ To delete the active epic board: Prerequisites: -- A minimum of [Reporter](../../permissions.md#group-members-permissions) access to a group in GitLab. +- You must have at least the [Reporter role](../../permissions.md#group-members-permissions) for a group. To create a new list: @@ -90,7 +90,7 @@ list view that's removed. You can always create it again later if you need. Prerequisites: -- A minimum of [Reporter](../../permissions.md#group-members-permissions) access to a group in GitLab. +- You must have at least the [Reporter role](../../permissions.md#group-members-permissions) for a group. To remove a list from an epic board: @@ -120,7 +120,7 @@ You can move epics and lists by dragging them. Prerequisites: -- A minimum of [Reporter](../../permissions.md#group-members-permissions) access to a group in GitLab. +- You must have at least the [Reporter role](../../permissions.md#group-members-permissions) for a group. To move an epic, select the epic card and drag it to another position in its current list or into another list. Learn about possible effects in [Dragging epics between lists](#dragging-epics-between-lists). @@ -143,7 +143,7 @@ and the target list. Prerequisites: -- A minimum of [Reporter](../../permissions.md#group-members-permissions) access to a group in GitLab. 
+- You must have at least the [Reporter role](../../permissions.md#group-members-permissions) for a group. To edit the scope of an epic board: diff --git a/doc/user/packages/container_registry/index.md b/doc/user/packages/container_registry/index.md index 9d65c5d37ad..d6e86e64e78 100644 --- a/doc/user/packages/container_registry/index.md +++ b/doc/user/packages/container_registry/index.md @@ -488,6 +488,10 @@ Cleanup policies can be run on all projects, with these exceptions: Feature.disable(:container_expiration_policies_historic_entry, Project.find(<project id>)) ``` +WARNING: +For performance reasons, enabled cleanup policies are automatically disabled for projects on +GitLab.com that don't have a container image. + ### How the cleanup policy works The cleanup policy collects all tags in the Container Registry and excludes tags diff --git a/doc/user/project/merge_requests/approvals/settings.md b/doc/user/project/merge_requests/approvals/settings.md index 97e4b7da396..b72a4125d0e 100644 --- a/doc/user/project/merge_requests/approvals/settings.md +++ b/doc/user/project/merge_requests/approvals/settings.md @@ -34,7 +34,7 @@ on merge requests, you can disable this setting: 1. Select the **Prevent users from modifying MR approval rules in merge requests** checkbox. 1. Select **Save changes**. -TODO This change affects all open merge requests. +This change affects all open merge requests. 
## Reset approvals on push diff --git a/lib/api/members.rb b/lib/api/members.rb index 0956806da5b..70e13e8d4ae 100644 --- a/lib/api/members.rb +++ b/lib/api/members.rb @@ -97,6 +97,8 @@ module API end # rubocop: disable CodeReuse/ActiveRecord post ":id/members" do + ::Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/333434') + source = find_source(source_type, params[:id]) authorize_admin_source!(source_type, source) diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb index 36f58d43a77..580c7042f1e 100644 --- a/lib/gitlab/auth.rb +++ b/lib/gitlab/auth.rb @@ -385,7 +385,7 @@ module Gitlab end def can_user_login_with_non_expired_password?(user) - user.can?(:log_in) && !user.password_expired? + user.can?(:log_in) && !user.password_expired_if_applicable? end end end diff --git a/lib/gitlab/auth/current_user_mode.rb b/lib/gitlab/auth/current_user_mode.rb index a6d706c2a49..fc391543f4d 100644 --- a/lib/gitlab/auth/current_user_mode.rb +++ b/lib/gitlab/auth/current_user_mode.rb @@ -27,22 +27,27 @@ module Gitlab # will bypass the session check for a user that was already in admin mode # # If passed a block, it will surround the block execution and reset the session - # bypass at the end; otherwise use manually '.reset_bypass_session!' + # bypass at the end; otherwise you must remember to call '.reset_bypass_session!' def bypass_session!(admin_id) Gitlab::SafeRequestStore[CURRENT_REQUEST_BYPASS_SESSION_ADMIN_ID_RS_KEY] = admin_id + # Bypassing the session invalidates the cached value of admin_mode? + # Any new calls need to be re-computed. + uncache_admin_mode_state(admin_id) Gitlab::AppLogger.debug("Bypassing session in admin mode for: #{admin_id}") - if block_given? - begin - yield - ensure - reset_bypass_session! - end + return unless block_given? + + begin + yield + ensure + reset_bypass_session!(admin_id) end end - def reset_bypass_session! 
+ def reset_bypass_session!(admin_id = nil) + # Restoring the session bypass invalidates the cached value of admin_mode? + uncache_admin_mode_state(admin_id) Gitlab::SafeRequestStore.delete(CURRENT_REQUEST_BYPASS_SESSION_ADMIN_ID_RS_KEY) end @@ -50,10 +55,21 @@ module Gitlab Gitlab::SafeRequestStore[CURRENT_REQUEST_BYPASS_SESSION_ADMIN_ID_RS_KEY] end + def uncache_admin_mode_state(admin_id = nil) + if admin_id + key = { res: :current_user_mode, user: admin_id, method: :admin_mode? } + Gitlab::SafeRequestStore.delete(key) + else + Gitlab::SafeRequestStore.delete_if do |key| + key.is_a?(Hash) && key[:res] == :current_user_mode && key[:method] == :admin_mode? + end + end + end + # Store in the current request the provided user model (only if in admin mode) # and yield def with_current_admin(admin) - return yield unless self.new(admin).admin_mode? + return yield unless new(admin).admin_mode? Gitlab::SafeRequestStore[CURRENT_REQUEST_ADMIN_MODE_USER_RS_KEY] = admin diff --git a/lib/gitlab/auth/user_access_denied_reason.rb b/lib/gitlab/auth/user_access_denied_reason.rb index 6639000dba8..904759919ae 100644 --- a/lib/gitlab/auth/user_access_denied_reason.rb +++ b/lib/gitlab/auth/user_access_denied_reason.rb @@ -23,6 +23,8 @@ module Gitlab "Your primary email address is not confirmed. "\ "Please check your inbox for the confirmation instructions. "\ "In case the link is expired, you can request a new confirmation email at #{Rails.application.routes.url_helpers.new_user_confirmation_url}" + when :blocked + "Your account has been blocked." when :password_expired "Your password expired. "\ "Please access GitLab from a web browser to update your password." @@ -44,6 +46,8 @@ module Gitlab :deactivated elsif !@user.confirmed? :unconfirmed + elsif @user.blocked? + :blocked elsif @user.password_expired? 
:password_expired else diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml index a2b112b8e9f..5521a4a781b 100644 --- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml @@ -1,14 +1,21 @@ +# To use this template, add the following to your .gitlab-ci.yml file: +# +# include: +# template: DAST.gitlab-ci.yml +# +# You also need to add a `dast` stage to your `stages:` configuration. A sample configuration for DAST: +# +# stages: +# - build +# - test +# - deploy +# - dast + # Read more about this feature here: https://docs.gitlab.com/ee/user/application_security/dast/ # Configure DAST with CI/CD variables (https://docs.gitlab.com/ee/ci/variables/README.html). # List of available variables: https://docs.gitlab.com/ee/user/application_security/dast/#available-variables -stages: - - build - - test - - deploy - - dast - variables: DAST_VERSION: 2 # Setting this variable will affect all Security templates diff --git a/lib/gitlab/database/migrations/observers.rb b/lib/gitlab/database/migrations/observers.rb index b65a303ef30..979a098d699 100644 --- a/lib/gitlab/database/migrations/observers.rb +++ b/lib/gitlab/database/migrations/observers.rb @@ -8,7 +8,8 @@ module Gitlab [ TotalDatabaseSizeChange.new, QueryStatistics.new, - QueryLog.new + QueryLog.new, + QueryDetails.new ] end end diff --git a/lib/gitlab/database/migrations/observers/query_details.rb b/lib/gitlab/database/migrations/observers/query_details.rb new file mode 100644 index 00000000000..52b6464d449 --- /dev/null +++ b/lib/gitlab/database/migrations/observers/query_details.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module Migrations + module Observers + class QueryDetails < MigrationObserver + def before + @file_path = File.join(Instrumentation::RESULT_DIR, 'current-details.json') + @file = File.open(@file_path, 'wb') + @writer = 
Oj::StreamWriter.new(@file, {}) + @writer.push_array + @subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |*args| + record_sql_event(*args) + end + end + + def after + ActiveSupport::Notifications.unsubscribe(@subscriber) + @writer.pop_all + @writer.flush + @file.close + end + + def record(observation) + File.rename(@file_path, File.join(Instrumentation::RESULT_DIR, "#{observation.migration}-query-details.json")) + end + + def record_sql_event(_name, started, finished, _unique_id, payload) + @writer.push_value({ + start_time: started.iso8601(6), + end_time: finished.iso8601(6), + sql: payload[:sql], + binds: payload[:type_casted_binds] + }) + end + end + end + end + end +end diff --git a/lib/gitlab/exclusive_lease.rb b/lib/gitlab/exclusive_lease.rb index 6749bd6ca60..75d07a36dcd 100644 --- a/lib/gitlab/exclusive_lease.rb +++ b/lib/gitlab/exclusive_lease.rb @@ -36,6 +36,28 @@ module Gitlab end end + # yield to the {block} at most {count} times per {period} + # + # Defaults to once per hour. + # + # For example: + # + # # toot the train horn at most every 20min: + # throttle(locomotive.id, count: 3, period: 1.hour) { toot_train_horn } + # # Brake suddenly at most once every minute: + # throttle(locomotive.id, period: 1.minute) { brake_suddenly } + # # Specify a uniqueness group: + # throttle(locomotive.id, group: :locomotive_brake) { brake_suddenly } + # + # If a group is not specified, each block will get a separate group to itself. + def self.throttle(key, group: nil, period: 1.hour, count: 1, &block) + group ||= block.source_location.join(':') + + return if new("el:throttle:#{group}:#{key}", timeout: period.to_i / count).waiting? + + yield + end + def self.cancel(key, uuid) return unless key.present? @@ -79,6 +101,11 @@ module Gitlab end end + # This lease is waiting to obtain + def waiting? + !try_obtain + end + # Try to renew an existing lease. 
Return lease UUID on success, # false if the lease is taken by a different UUID or inexistent. def renew diff --git a/lib/gitlab/lfs_token.rb b/lib/gitlab/lfs_token.rb index c7f2adb27d1..2e8564b6e00 100644 --- a/lib/gitlab/lfs_token.rb +++ b/lib/gitlab/lfs_token.rb @@ -52,7 +52,7 @@ module Gitlab def valid_user? return true unless user? - !actor.blocked? && (!actor.allow_password_authentication? || !actor.password_expired?) + !actor.blocked? && !actor.password_expired_if_applicable? end def authentication_payload(repository_http_path) diff --git a/lib/gitlab/pagination/keyset/paginator.rb b/lib/gitlab/pagination/keyset/paginator.rb index 2ec4472fcd6..1c71549d86a 100644 --- a/lib/gitlab/pagination/keyset/paginator.rb +++ b/lib/gitlab/pagination/keyset/paginator.rb @@ -26,7 +26,7 @@ module Gitlab # per_page - Number of items per page. # cursor_converter - Object that serializes and de-serializes the cursor attributes. Implements dump and parse methods. # direction_key - Symbol that will be the hash key of the direction within the cursor. (default: _kd => keyset direction) - def initialize(scope:, cursor: nil, per_page: 20, cursor_converter: Base64CursorConverter, direction_key: :_kd) + def initialize(scope:, cursor: nil, per_page: 20, cursor_converter: Base64CursorConverter, direction_key: :_kd, keyset_order_options: {}) @keyset_scope = build_scope(scope) @order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(@keyset_scope) @per_page = per_page @@ -36,6 +36,7 @@ module Gitlab @at_last_page = false @at_first_page = false @cursor_attributes = decode_cursor_attributes(cursor) + @keyset_order_options = keyset_order_options set_pagination_helper_flags! end @@ -45,13 +46,13 @@ module Gitlab @records ||= begin items = if paginate_backward? 
reversed_order - .apply_cursor_conditions(keyset_scope, cursor_attributes) + .apply_cursor_conditions(keyset_scope, cursor_attributes, keyset_order_options) .reorder(reversed_order) .limit(per_page_plus_one) .to_a else order - .apply_cursor_conditions(keyset_scope, cursor_attributes) + .apply_cursor_conditions(keyset_scope, cursor_attributes, keyset_order_options) .limit(per_page_plus_one) .to_a end @@ -120,7 +121,7 @@ module Gitlab private - attr_reader :keyset_scope, :order, :per_page, :cursor_converter, :direction_key, :cursor_attributes + attr_reader :keyset_scope, :order, :per_page, :cursor_converter, :direction_key, :cursor_attributes, :keyset_order_options delegate :reversed_order, to: :order diff --git a/lib/gitlab/safe_request_store.rb b/lib/gitlab/safe_request_store.rb index d146913bdb3..664afd1cc21 100644 --- a/lib/gitlab/safe_request_store.rb +++ b/lib/gitlab/safe_request_store.rb @@ -20,6 +20,15 @@ module Gitlab end end + # Access to the backing storage of the request store. This returns an object + # with `[]` and `[]=` methods that does not discard values. + # + # This can be useful if storage is needed for a delimited purpose, and the + # forgetful nature of the null store is undesirable. + def self.storage + store.store + end + # This method accept an options hash to be compatible with # ActiveSupport::Cache::Store#write method. The options are # not passed to the underlying cache implementation because @@ -27,5 +36,11 @@ module Gitlab def self.write(key, value, options = nil) store.write(key, value) end + + def self.delete_if(&block) + return unless RequestStore.active? 
+ + storage.delete_if { |k, v| block.call(k) } + end end end diff --git a/lib/sidebars/projects/menus/ci_cd_menu.rb b/lib/sidebars/projects/menus/ci_cd_menu.rb index 042ad17fdfc..f85a9faacd3 100644 --- a/lib/sidebars/projects/menus/ci_cd_menu.rb +++ b/lib/sidebars/projects/menus/ci_cd_menu.rb @@ -61,6 +61,10 @@ module Sidebars pipelines#index pipelines#show pipelines#new + pipelines#dag + pipelines#failures + pipelines#builds + pipelines#test_report ] end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index d0d24dfeb2e..1d63a5fb16a 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -28273,6 +28273,9 @@ msgstr "" msgid "Runners|Name" msgstr "" +msgid "Runners|New registration token generated!" +msgstr "" + msgid "Runners|New runner, has not connected yet" msgstr "" @@ -31227,6 +31230,9 @@ msgstr "" msgid "StorageSize|Unknown" msgstr "" +msgid "Strikethrough" +msgstr "" + msgid "Subgroup information" msgstr "" diff --git a/qa/qa/specs/features/browser_ui/non_devops/performance_bar_spec.rb b/qa/qa/specs/features/browser_ui/non_devops/performance_bar_spec.rb index de425dde6c0..a0f613cfda2 100644 --- a/qa/qa/specs/features/browser_ui/non_devops/performance_bar_spec.rb +++ b/qa/qa/specs/features/browser_ui/non_devops/performance_bar_spec.rb @@ -20,7 +20,7 @@ module QA end end - it 'shows results for the original request and AJAX requests', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/478', quarantine: { only: { pipeline: :main }, issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/323051', type: :bug } do + it 'shows results for the original request and AJAX requests', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/issues/478' do # Issue pages always make AJAX requests Resource::Issue.fabricate_via_browser_ui! 
do |issue| issue.title = 'Performance bar test' diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb index a0cb5c1473a..dcfccc00347 100644 --- a/spec/controllers/projects/protected_branches_controller_spec.rb +++ b/spec/controllers/projects/protected_branches_controller_spec.rb @@ -42,7 +42,7 @@ RSpec.describe Projects::ProtectedBranchesController do context 'when a policy restricts rule deletion' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) allow(ProtectedBranchPolicy).to receive(:new).and_return(policy) end @@ -70,7 +70,7 @@ RSpec.describe Projects::ProtectedBranchesController do context 'when a policy restricts rule deletion' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) allow(ProtectedBranchPolicy).to receive(:new).and_return(policy) end @@ -97,7 +97,7 @@ RSpec.describe Projects::ProtectedBranchesController do context 'when a policy restricts rule deletion' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) allow(ProtectedBranchPolicy).to receive(:new).and_return(policy) end diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb index fd570ca9c50..1dd2839aa46 100644 --- a/spec/factories/integrations.rb +++ b/spec/factories/integrations.rb @@ -12,7 +12,7 @@ FactoryBot.define do issue_tracker end - factory :emails_on_push_service, class: 'Integrations::EmailsOnPush' do + factory :emails_on_push_integration, class: 'Integrations::EmailsOnPush' do project type { 'EmailsOnPushService' } active { true } @@ -103,7 +103,7 @@ FactoryBot.define do issue_tracker end - factory :ewm_service, class: 'Integrations::Ewm' do + factory :ewm_integration, class: 'Integrations::Ewm' do 
project active { true } issue_tracker @@ -127,7 +127,7 @@ FactoryBot.define do end end - factory :external_wiki_service, class: 'Integrations::ExternalWiki' do + factory :external_wiki_integration, class: 'Integrations::ExternalWiki' do project type { 'ExternalWikiService' } active { true } diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index cc561ef65a2..6641d8749f9 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -426,7 +426,7 @@ FactoryBot.define do factory :ewm_project, parent: :project do has_external_issue_tracker { true } - ewm_service + ewm_integration end factory :project_with_design, parent: :project do diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb index b333f64aa87..39950adc83f 100644 --- a/spec/features/projects/active_tabs_spec.rb +++ b/spec/features/projects/active_tabs_spec.rb @@ -182,4 +182,55 @@ RSpec.describe 'Project active tab' do it_behaves_like 'page has active sub tab', _('CI/CD') end end + + context 'on project CI/CD' do + context 'browsing Pipelines tabs' do + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + + context 'Pipeline tab' do + before do + visit project_pipeline_path(project, pipeline) + end + + it_behaves_like 'page has active tab', _('CI/CD') + it_behaves_like 'page has active sub tab', _('Pipelines') + end + + context 'Needs tab' do + before do + visit dag_project_pipeline_path(project, pipeline) + end + + it_behaves_like 'page has active tab', _('CI/CD') + it_behaves_like 'page has active sub tab', _('Pipelines') + end + + context 'Builds tab' do + before do + visit builds_project_pipeline_path(project, pipeline) + end + + it_behaves_like 'page has active tab', _('CI/CD') + it_behaves_like 'page has active sub tab', _('Pipelines') + end + + context 'Failures tab' do + before do + visit failures_project_pipeline_path(project, pipeline) + end + + it_behaves_like 'page has active tab', _('CI/CD') + 
it_behaves_like 'page has active sub tab', _('Pipelines') + end + + context 'Test Report tab' do + before do + visit test_report_project_pipeline_path(project, pipeline) + end + + it_behaves_like 'page has active tab', _('CI/CD') + it_behaves_like 'page has active sub tab', _('Pipelines') + end + end + end end diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js index 0a1405a1774..0d55fa730ae 100644 --- a/spec/frontend/content_editor/components/top_toolbar_spec.js +++ b/spec/frontend/content_editor/components/top_toolbar_spec.js @@ -42,6 +42,7 @@ describe('content_editor/components/top_toolbar', () => { testId | controlProps ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }} ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }} + ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }} ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }} ${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }} ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }} diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml index a1ea2806879..3274e914f03 100644 --- a/spec/frontend/fixtures/api_markdown.yml +++ b/spec/frontend/fixtures/api_markdown.yml @@ -1,6 +1,6 @@ # This data file drives the specs in # spec/frontend/fixtures/api_markdown.rb and -# spec/frontend/rich_text_editor/extensions/markdown_processing_spec.js +# spec/frontend/content_editor/extensions/markdown_processing_spec.js --- - name: bold markdown: '**bold**' @@ -8,6 +8,8 @@ markdown: '_emphasized text_' - name: 
inline_code markdown: '`code`' +- name: strike + markdown: '~~del~~' - name: link markdown: '[GitLab](https://gitlab.com)' - name: code_block diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb index 1882ac49fd6..ac34400bc01 100644 --- a/spec/frontend/fixtures/releases.rb +++ b/spec/frontend/fixtures/releases.rb @@ -146,6 +146,7 @@ RSpec.describe 'Releases (JavaScript fixtures)' do post_graphql(query, current_user: admin, variables: { fullPath: project.full_path }) expect_graphql_errors_to_be_empty + expect(graphql_data_at(:project, :releases)).to be_present end it "graphql/#{one_release_query_path}.json" do @@ -154,6 +155,7 @@ RSpec.describe 'Releases (JavaScript fixtures)' do post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag }) expect_graphql_errors_to_be_empty + expect(graphql_data_at(:project, :release)).to be_present end it "graphql/#{one_release_for_editing_query_path}.json" do @@ -162,6 +164,7 @@ RSpec.describe 'Releases (JavaScript fixtures)' do post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag }) expect_graphql_errors_to_be_empty + expect(graphql_data_at(:project, :release)).to be_present end end end diff --git a/spec/frontend/issuable_list/components/issuable_list_root_spec.js b/spec/frontend/issuable_list/components/issuable_list_root_spec.js index 38d6d6d86bc..7dddd2c3405 100644 --- a/spec/frontend/issuable_list/components/issuable_list_root_spec.js +++ b/spec/frontend/issuable_list/components/issuable_list_root_spec.js @@ -1,4 +1,4 @@ -import { GlSkeletonLoading, GlPagination } from '@gitlab/ui'; +import { GlKeysetPagination, GlSkeletonLoading, GlPagination } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import VueDraggable from 'vuedraggable'; @@ -11,9 +11,12 @@ import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filte import { mockIssuableListProps, mockIssuables } 
from '../mock_data'; -const createComponent = ({ props = mockIssuableListProps, data = {} } = {}) => +const createComponent = ({ props = {}, data = {} } = {}) => shallowMount(IssuableListRoot, { - propsData: props, + propsData: { + ...mockIssuableListProps, + ...props, + }, data() { return data; }, @@ -34,6 +37,7 @@ describe('IssuableListRoot', () => { let wrapper; const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar); + const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination); const findGlPagination = () => wrapper.findComponent(GlPagination); const findIssuableItem = () => wrapper.findComponent(IssuableItem); const findIssuableTabs = () => wrapper.findComponent(IssuableTabs); @@ -189,15 +193,15 @@ describe('IssuableListRoot', () => { }); describe('template', () => { - beforeEach(() => { + it('renders component container element with class "issuable-list-container"', () => { wrapper = createComponent(); - }); - it('renders component container element with class "issuable-list-container"', () => { expect(wrapper.classes()).toContain('issuable-list-container'); }); it('renders issuable-tabs component', () => { + wrapper = createComponent(); + const tabsEl = findIssuableTabs(); expect(tabsEl.exists()).toBe(true); @@ -209,6 +213,8 @@ describe('IssuableListRoot', () => { }); it('renders contents for slot "nav-actions" within issuable-tab component', () => { + wrapper = createComponent(); + const buttonEl = findIssuableTabs().find('button.js-new-issuable'); expect(buttonEl.exists()).toBe(true); @@ -216,6 +222,8 @@ describe('IssuableListRoot', () => { }); it('renders filtered-search-bar component', () => { + wrapper = createComponent(); + const searchEl = findFilteredSearchBar(); const { namespace, @@ -239,12 +247,8 @@ describe('IssuableListRoot', () => { }); }); - it('renders gl-loading-icon when `issuablesLoading` prop is true', async () => { - wrapper.setProps({ - issuablesLoading: true, - }); - - await 
wrapper.vm.$nextTick(); + it('renders gl-loading-icon when `issuablesLoading` prop is true', () => { + wrapper = createComponent({ props: { issuablesLoading: true } }); expect(wrapper.findAllComponents(GlSkeletonLoading)).toHaveLength( wrapper.vm.skeletonItemCount, @@ -252,6 +256,8 @@ describe('IssuableListRoot', () => { }); it('renders issuable-item component for each item within `issuables` array', () => { + wrapper = createComponent(); + const itemsEl = wrapper.findAllComponents(IssuableItem); const mockIssuable = mockIssuableListProps.issuables[0]; @@ -262,28 +268,23 @@ describe('IssuableListRoot', () => { }); }); - it('renders contents for slot "empty-state" when `issuablesLoading` is false and `issuables` is empty', async () => { - wrapper.setProps({ - issuables: [], - }); - - await wrapper.vm.$nextTick(); + it('renders contents for slot "empty-state" when `issuablesLoading` is false and `issuables` is empty', () => { + wrapper = createComponent({ props: { issuables: [] } }); expect(wrapper.find('p.js-issuable-empty-state').exists()).toBe(true); expect(wrapper.find('p.js-issuable-empty-state').text()).toBe('Issuable empty state'); }); - it('renders gl-pagination when `showPaginationControls` prop is true', async () => { - wrapper.setProps({ - showPaginationControls: true, - totalItems: 10, + it('renders only gl-pagination when `showPaginationControls` prop is true', () => { + wrapper = createComponent({ + props: { + showPaginationControls: true, + totalItems: 10, + }, }); - await wrapper.vm.$nextTick(); - - const paginationEl = findGlPagination(); - expect(paginationEl.exists()).toBe(true); - expect(paginationEl.props()).toMatchObject({ + expect(findGlKeysetPagination().exists()).toBe(false); + expect(findGlPagination().props()).toMatchObject({ perPage: 20, value: 1, prevPage: 0, @@ -292,32 +293,47 @@ describe('IssuableListRoot', () => { align: 'center', }); }); - }); - describe('events', () => { - beforeEach(() => { + it('renders only gl-keyset-pagination 
when `showPaginationControls` and `useKeysetPagination` props are true', () => { wrapper = createComponent({ - data: { - checkedIssuables: { - [mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] }, - }, + props: { + hasNextPage: true, + hasPreviousPage: true, + showPaginationControls: true, + useKeysetPagination: true, }, }); + + expect(findGlPagination().exists()).toBe(false); + expect(findGlKeysetPagination().props()).toMatchObject({ + hasNextPage: true, + hasPreviousPage: true, + }); }); + }); + + describe('events', () => { + const data = { + checkedIssuables: { + [mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] }, + }, + }; it('issuable-tabs component emits `click-tab` event on `click-tab` event', () => { + wrapper = createComponent({ data }); + findIssuableTabs().vm.$emit('click'); expect(wrapper.emitted('click-tab')).toBeTruthy(); }); - it('sets all issuables as checked when filtered-search-bar component emits `checked-input` event', async () => { + it('sets all issuables as checked when filtered-search-bar component emits `checked-input` event', () => { + wrapper = createComponent({ data }); + const searchEl = findFilteredSearchBar(); searchEl.vm.$emit('checked-input', true); - await wrapper.vm.$nextTick(); - expect(searchEl.emitted('checked-input')).toBeTruthy(); expect(searchEl.emitted('checked-input').length).toBe(1); @@ -328,6 +344,8 @@ describe('IssuableListRoot', () => { }); it('filtered-search-bar component emits `filter` event on `onFilter` & `sort` event on `onSort` events', () => { + wrapper = createComponent({ data }); + const searchEl = findFilteredSearchBar(); searchEl.vm.$emit('onFilter'); @@ -336,13 +354,13 @@ describe('IssuableListRoot', () => { expect(wrapper.emitted('sort')).toBeTruthy(); }); - it('sets an issuable as checked when issuable-item component emits `checked-input` event', async () => { + it('sets an issuable as checked when issuable-item component emits `checked-input` event', () => { + wrapper 
= createComponent({ data }); + const issuableItem = wrapper.findAllComponents(IssuableItem).at(0); issuableItem.vm.$emit('checked-input', true); - await wrapper.vm.$nextTick(); - expect(issuableItem.emitted('checked-input')).toBeTruthy(); expect(issuableItem.emitted('checked-input').length).toBe(1); @@ -353,27 +371,45 @@ describe('IssuableListRoot', () => { }); it('emits `update-legacy-bulk-edit` when filtered-search-bar checkbox is checked', () => { + wrapper = createComponent({ data }); + findFilteredSearchBar().vm.$emit('checked-input'); expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]); }); it('emits `update-legacy-bulk-edit` when issuable-item checkbox is checked', () => { + wrapper = createComponent({ data }); + findIssuableItem().vm.$emit('checked-input'); expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]); }); - it('gl-pagination component emits `page-change` event on `input` event', async () => { - wrapper.setProps({ - showPaginationControls: true, - }); - - await wrapper.vm.$nextTick(); + it('gl-pagination component emits `page-change` event on `input` event', () => { + wrapper = createComponent({ data, props: { showPaginationControls: true } }); findGlPagination().vm.$emit('input'); expect(wrapper.emitted('page-change')).toBeTruthy(); }); + + it.each` + event | glKeysetPaginationEvent + ${'next-page'} | ${'next'} + ${'previous-page'} | ${'prev'} + `( + 'emits `$event` event when gl-keyset-pagination emits `$glKeysetPaginationEvent` event', + ({ event, glKeysetPaginationEvent }) => { + wrapper = createComponent({ + data, + props: { showPaginationControls: true, useKeysetPagination: true }, + }); + + findGlKeysetPagination().vm.$emit(glKeysetPaginationEvent); + + expect(wrapper.emitted(event)).toEqual([[]]); + }, + ); }); describe('manual sorting', () => { diff --git a/spec/frontend/issues_list/components/issue_card_time_info_spec.js b/spec/frontend/issues_list/components/issue_card_time_info_spec.js index 
614ad586ec9..634687e77ab 100644 --- a/spec/frontend/issues_list/components/issue_card_time_info_spec.js +++ b/spec/frontend/issues_list/components/issue_card_time_info_spec.js @@ -13,12 +13,10 @@ describe('IssuesListApp component', () => { dueDate: '2020-12-17', startDate: '2020-12-10', title: 'My milestone', - webUrl: '/milestone/webUrl', + webPath: '/milestone/webPath', }, dueDate: '2020-12-12', - timeStats: { - humanTimeEstimate: '1w', - }, + humanTimeEstimate: '1w', }; const findMilestone = () => wrapper.find('[data-testid="issuable-milestone"]'); @@ -56,7 +54,7 @@ describe('IssuesListApp component', () => { expect(milestone.text()).toBe(issue.milestone.title); expect(milestone.find(GlIcon).props('name')).toBe('clock'); - expect(milestone.find(GlLink).attributes('href')).toBe(issue.milestone.webUrl); + expect(milestone.find(GlLink).attributes('href')).toBe(issue.milestone.webPath); }); describe.each` @@ -102,7 +100,7 @@ describe('IssuesListApp component', () => { const timeEstimate = wrapper.find('[data-testid="time-estimate"]'); - expect(timeEstimate.text()).toBe(issue.timeStats.humanTimeEstimate); + expect(timeEstimate.text()).toBe(issue.humanTimeEstimate); expect(timeEstimate.attributes('title')).toBe('Estimate'); expect(timeEstimate.find(GlIcon).props('name')).toBe('timer'); }); diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js index d78a436c618..a3ac57ee1bb 100644 --- a/spec/frontend/issues_list/components/issues_list_app_spec.js +++ b/spec/frontend/issues_list/components/issues_list_app_spec.js @@ -1,9 +1,19 @@ import { GlButton, GlEmptyState, GlLink } from '@gitlab/ui'; -import { mount, shallowMount } from '@vue/test-utils'; +import { createLocalVue, mount, shallowMount } from '@vue/test-utils'; import AxiosMockAdapter from 'axios-mock-adapter'; +import { cloneDeep } from 'lodash'; +import { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; +import 
getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql'; +import createMockApollo from 'helpers/mock_apollo_helper'; import { TEST_HOST } from 'helpers/test_constants'; import waitForPromises from 'helpers/wait_for_promises'; -import { apiParams, filteredTokens, locationSearch, urlParams } from 'jest/issues_list/mock_data'; +import { + getIssuesQueryResponse, + filteredTokens, + locationSearch, + urlParams, +} from 'jest/issues_list/mock_data'; import createFlash from '~/flash'; import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue'; import IssuableByEmail from '~/issuable/components/issuable_by_email.vue'; @@ -14,10 +24,7 @@ import { apiSortParams, CREATED_DESC, DUE_DATE_OVERDUE, - PAGE_SIZE, - PAGE_SIZE_MANUAL, PARAM_DUE_DATE, - RELATIVE_POSITION_DESC, TOKEN_TYPE_ASSIGNEE, TOKEN_TYPE_AUTHOR, TOKEN_TYPE_CONFIDENTIAL, @@ -32,20 +39,26 @@ import { import eventHub from '~/issues_list/eventhub'; import { getSortOptions } from '~/issues_list/utils'; import axios from '~/lib/utils/axios_utils'; +import { scrollUp } from '~/lib/utils/scroll_utils'; import { setUrlParams } from '~/lib/utils/url_utility'; jest.mock('~/flash'); +jest.mock('~/lib/utils/scroll_utils', () => ({ + scrollUp: jest.fn().mockName('scrollUpMock'), +})); describe('IssuesListApp component', () => { let axiosMock; let wrapper; + const localVue = createLocalVue(); + localVue.use(VueApollo); + const defaultProvide = { autocompleteUsersPath: 'autocomplete/users/path', calendarPath: 'calendar/path', canBulkUpdate: false, emptyStateSvgPath: 'empty-state.svg', - endpoint: 'api/endpoint', exportCsvPath: 'export/csv/path', hasBlockedIssuesFeature: true, hasIssueWeightsFeature: true, @@ -61,21 +74,13 @@ describe('IssuesListApp component', () => { signInPath: 'sign/in/path', }; - const state = 'opened'; - const xPage = 1; - const xTotal = 25; - const tabCounts = { - opened: xTotal, - closed: undefined, - all: undefined, - }; - const fetchIssuesResponse = { 
- data: [], - headers: { - 'x-page': xPage, - 'x-total': xTotal, - }, - }; + let defaultQueryResponse = getIssuesQueryResponse; + if (IS_EE) { + defaultQueryResponse = cloneDeep(getIssuesQueryResponse); + defaultQueryResponse.data.project.issues.nodes[0].blockedByCount = 1; + defaultQueryResponse.data.project.issues.nodes[0].healthStatus = null; + defaultQueryResponse.data.project.issues.nodes[0].weight = 5; + } const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons); const findIssuableByEmail = () => wrapper.findComponent(IssuableByEmail); @@ -86,19 +91,26 @@ describe('IssuesListApp component', () => { const findGlLink = () => wrapper.findComponent(GlLink); const findIssuableList = () => wrapper.findComponent(IssuableList); - const mountComponent = ({ provide = {}, mountFn = shallowMount } = {}) => - mountFn(IssuesListApp, { + const mountComponent = ({ + provide = {}, + response = defaultQueryResponse, + mountFn = shallowMount, + } = {}) => { + const requestHandlers = [[getIssuesQuery, jest.fn().mockResolvedValue(response)]]; + const apolloProvider = createMockApollo(requestHandlers); + + return mountFn(IssuesListApp, { + localVue, + apolloProvider, provide: { ...defaultProvide, ...provide, }, }); + }; beforeEach(() => { axiosMock = new AxiosMockAdapter(axios); - axiosMock - .onGet(defaultProvide.endpoint) - .reply(200, fetchIssuesResponse.data, fetchIssuesResponse.headers); }); afterEach(() => { @@ -108,28 +120,37 @@ describe('IssuesListApp component', () => { }); describe('IssuableList', () => { - beforeEach(async () => { + beforeEach(() => { wrapper = mountComponent(); - await waitForPromises(); + jest.runOnlyPendingTimers(); }); it('renders', () => { expect(findIssuableList().props()).toMatchObject({ namespace: defaultProvide.projectPath, recentSearchesStorageKey: 'issues', - searchInputPlaceholder: 'Search or filter results…', + searchInputPlaceholder: IssuesListApp.i18n.searchPlaceholder, sortOptions: getSortOptions(true, 
true), initialSortBy: CREATED_DESC, + issuables: getIssuesQueryResponse.data.project.issues.nodes, tabs: IssuableListTabs, currentTab: IssuableStates.Opened, - tabCounts, - showPaginationControls: false, - issuables: [], - totalItems: xTotal, - currentPage: xPage, - previousPage: xPage - 1, - nextPage: xPage + 1, - urlParams: { page: xPage, state }, + tabCounts: { + opened: 1, + closed: undefined, + all: undefined, + }, + issuablesLoading: false, + isManualOrdering: false, + showBulkEditSidebar: false, + showPaginationControls: true, + useKeysetPagination: true, + hasPreviousPage: getIssuesQueryResponse.data.project.issues.pageInfo.hasPreviousPage, + hasNextPage: getIssuesQueryResponse.data.project.issues.pageInfo.hasNextPage, + urlParams: { + state: IssuableStates.Opened, + ...urlSortParams[CREATED_DESC], + }, }); }); }); @@ -157,9 +178,9 @@ describe('IssuesListApp component', () => { describe('csv import/export component', () => { describe('when user is signed in', () => { - it('renders', async () => { - const search = '?page=1&search=refactor&state=opened&sort=created_date'; + const search = '?search=refactor&state=opened&sort=created_date'; + beforeEach(() => { global.jsdom.reconfigure({ url: `${TEST_HOST}${search}` }); wrapper = mountComponent({ @@ -167,11 +188,13 @@ describe('IssuesListApp component', () => { mountFn: mount, }); - await waitForPromises(); + jest.runOnlyPendingTimers(); + }); + it('renders', () => { expect(findCsvImportExportButtons().props()).toMatchObject({ exportCsvPath: `${defaultProvide.exportCsvPath}${search}`, - issuableCount: xTotal, + issuableCount: 1, }); }); }); @@ -238,18 +261,6 @@ describe('IssuesListApp component', () => { }); }); - describe('page', () => { - it('is set from the url params', () => { - const page = 5; - - global.jsdom.reconfigure({ url: setUrlParams({ page }, TEST_HOST) }); - - wrapper = mountComponent(); - - expect(findIssuableList().props('currentPage')).toBe(page); - }); - }); - describe('search', () => { 
it('is set from the url params', () => { global.jsdom.reconfigure({ url: `${TEST_HOST}${locationSearch}` }); @@ -326,12 +337,10 @@ describe('IssuesListApp component', () => { describe('empty states', () => { describe('when there are issues', () => { describe('when search returns no results', () => { - beforeEach(async () => { + beforeEach(() => { global.jsdom.reconfigure({ url: `${TEST_HOST}?search=no+results` }); wrapper = mountComponent({ provide: { hasProjectIssues: true }, mountFn: mount }); - - await waitForPromises(); }); it('shows empty state', () => { @@ -344,10 +353,8 @@ describe('IssuesListApp component', () => { }); describe('when "Open" tab has no issues', () => { - beforeEach(async () => { + beforeEach(() => { wrapper = mountComponent({ provide: { hasProjectIssues: true }, mountFn: mount }); - - await waitForPromises(); }); it('shows empty state', () => { @@ -360,14 +367,12 @@ describe('IssuesListApp component', () => { }); describe('when "Closed" tab has no issues', () => { - beforeEach(async () => { + beforeEach(() => { global.jsdom.reconfigure({ url: setUrlParams({ state: IssuableStates.Closed }, TEST_HOST), }); wrapper = mountComponent({ provide: { hasProjectIssues: true }, mountFn: mount }); - - await waitForPromises(); }); it('shows empty state', () => { @@ -555,98 +560,70 @@ describe('IssuesListApp component', () => { describe('events', () => { describe('when "click-tab" event is emitted by IssuableList', () => { beforeEach(() => { - axiosMock.onGet(defaultProvide.endpoint).reply(200, fetchIssuesResponse.data, { - 'x-page': 2, - 'x-total': xTotal, - }); - wrapper = mountComponent(); findIssuableList().vm.$emit('click-tab', IssuableStates.Closed); }); - it('makes API call to filter the list by the new state and resets the page to 1', () => { - expect(axiosMock.history.get[1].params).toMatchObject({ - page: 1, - state: IssuableStates.Closed, - }); + it('updates to the new tab', () => { + 
expect(findIssuableList().props('currentTab')).toBe(IssuableStates.Closed); }); }); - describe('when "page-change" event is emitted by IssuableList', () => { - const data = [{ id: 10, title: 'title', state }]; - const page = 2; - const totalItems = 21; - - beforeEach(async () => { - axiosMock.onGet(defaultProvide.endpoint).reply(200, data, { - 'x-page': page, - 'x-total': totalItems, - }); - - wrapper = mountComponent(); - - findIssuableList().vm.$emit('page-change', page); - - await waitForPromises(); - }); + describe.each(['next-page', 'previous-page'])( + 'when "%s" event is emitted by IssuableList', + (event) => { + beforeEach(() => { + wrapper = mountComponent(); - it('fetches issues with expected params', () => { - expect(axiosMock.history.get[1].params).toMatchObject({ - page, - per_page: PAGE_SIZE, - state, - with_labels_details: true, + findIssuableList().vm.$emit(event); }); - }); - it('updates IssuableList with response data', () => { - expect(findIssuableList().props()).toMatchObject({ - issuables: data, - totalItems, - currentPage: page, - previousPage: page - 1, - nextPage: page + 1, - urlParams: { page, state }, + it('scrolls to the top', () => { + expect(scrollUp).toHaveBeenCalled(); }); - }); - }); + }, + ); describe('when "reorder" event is emitted by IssuableList', () => { - const issueOne = { id: 1, iid: 101, title: 'Issue one' }; - const issueTwo = { id: 2, iid: 102, title: 'Issue two' }; - const issueThree = { id: 3, iid: 103, title: 'Issue three' }; - const issueFour = { id: 4, iid: 104, title: 'Issue four' }; - const issues = [issueOne, issueTwo, issueThree, issueFour]; - - beforeEach(async () => { - axiosMock.onGet(defaultProvide.endpoint).reply(200, issues, fetchIssuesResponse.headers); - wrapper = mountComponent(); - await waitForPromises(); - }); - - describe('when successful', () => { - describe.each` - description | issueToMove | oldIndex | newIndex | moveBeforeId | moveAfterId - ${'to the beginning of the list'} | ${issueThree} | ${2} 
| ${0} | ${null} | ${issueOne.id} - ${'down the list'} | ${issueOne} | ${0} | ${1} | ${issueTwo.id} | ${issueThree.id} - ${'up the list'} | ${issueThree} | ${2} | ${1} | ${issueOne.id} | ${issueTwo.id} - ${'to the end of the list'} | ${issueTwo} | ${1} | ${3} | ${issueFour.id} | ${null} - `( - 'when moving issue $description', - ({ issueToMove, oldIndex, newIndex, moveBeforeId, moveAfterId }) => { - it('makes API call to reorder the issue', async () => { - findIssuableList().vm.$emit('reorder', { oldIndex, newIndex }); - - await waitForPromises(); - - expect(axiosMock.history.put[0]).toMatchObject({ - url: `${defaultProvide.issuesPath}/${issueToMove.iid}/reorder`, - data: JSON.stringify({ move_before_id: moveBeforeId, move_after_id: moveAfterId }), - }); - }); + const issueOne = { + ...defaultQueryResponse.data.project.issues.nodes[0], + id: 'gid://gitlab/Issue/1', + iid: 101, + title: 'Issue one', + }; + const issueTwo = { + ...defaultQueryResponse.data.project.issues.nodes[0], + id: 'gid://gitlab/Issue/2', + iid: 102, + title: 'Issue two', + }; + const issueThree = { + ...defaultQueryResponse.data.project.issues.nodes[0], + id: 'gid://gitlab/Issue/3', + iid: 103, + title: 'Issue three', + }; + const issueFour = { + ...defaultQueryResponse.data.project.issues.nodes[0], + id: 'gid://gitlab/Issue/4', + iid: 104, + title: 'Issue four', + }; + const response = { + data: { + project: { + issues: { + ...defaultQueryResponse.data.project.issues, + nodes: [issueOne, issueTwo, issueThree, issueFour], + }, }, - ); + }, + }; + + beforeEach(() => { + wrapper = mountComponent({ response }); + jest.runOnlyPendingTimers(); }); describe('when unsuccessful', () => { @@ -664,21 +641,16 @@ describe('IssuesListApp component', () => { describe('when "sort" event is emitted by IssuableList', () => { it.each(Object.keys(apiSortParams))( - 'fetches issues with correct params with payload `%s`', + 'updates to the new sort when payload is `%s`', async (sortKey) => { wrapper = 
mountComponent(); findIssuableList().vm.$emit('sort', sortKey); - await waitForPromises(); + jest.runOnlyPendingTimers(); + await nextTick(); - expect(axiosMock.history.get[1].params).toEqual({ - page: xPage, - per_page: sortKey === RELATIVE_POSITION_DESC ? PAGE_SIZE_MANUAL : PAGE_SIZE, - state, - with_labels_details: true, - ...apiSortParams[sortKey], - }); + expect(findIssuableList().props('urlParams')).toMatchObject(urlSortParams[sortKey]); }, ); }); @@ -687,13 +659,11 @@ describe('IssuesListApp component', () => { beforeEach(() => { wrapper = mountComponent(); jest.spyOn(eventHub, '$emit'); - }); - it('emits an "issuables:updateBulkEdit" event to the legacy bulk edit class', async () => { findIssuableList().vm.$emit('update-legacy-bulk-edit'); + }); - await waitForPromises(); - + it('emits an "issuables:updateBulkEdit" event to the legacy bulk edit class', () => { expect(eventHub.$emit).toHaveBeenCalledWith('issuables:updateBulkEdit'); }); }); @@ -705,10 +675,6 @@ describe('IssuesListApp component', () => { findIssuableList().vm.$emit('filter', filteredTokens); }); - it('makes an API call to search for issues with the search term', () => { - expect(axiosMock.history.get[1].params).toMatchObject(apiParams); - }); - it('updates IssuableList with url params', () => { expect(findIssuableList().props('urlParams')).toMatchObject(urlParams); }); diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js index 99267fb6e31..6c669e02070 100644 --- a/spec/frontend/issues_list/mock_data.js +++ b/spec/frontend/issues_list/mock_data.js @@ -3,6 +3,73 @@ import { OPERATOR_IS_NOT, } from '~/vue_shared/components/filtered_search_bar/constants'; +export const getIssuesQueryResponse = { + data: { + project: { + issues: { + count: 1, + pageInfo: { + hasNextPage: false, + hasPreviousPage: false, + startCursor: 'startcursor', + endCursor: 'endcursor', + }, + nodes: [ + { + id: 'gid://gitlab/Issue/123456', + iid: '789', + closedAt: null, + 
confidential: false, + createdAt: '2021-05-22T04:08:01Z', + downvotes: 2, + dueDate: '2021-05-29', + humanTimeEstimate: null, + moved: false, + title: 'Issue title', + updatedAt: '2021-05-22T04:08:01Z', + upvotes: 3, + userDiscussionsCount: 4, + webUrl: 'project/-/issues/789', + assignees: { + nodes: [ + { + id: 'gid://gitlab/User/234', + avatarUrl: 'avatar/url', + name: 'Marge Simpson', + username: 'msimpson', + webUrl: 'url/msimpson', + }, + ], + }, + author: { + id: 'gid://gitlab/User/456', + avatarUrl: 'avatar/url', + name: 'Homer Simpson', + username: 'hsimpson', + webUrl: 'url/hsimpson', + }, + labels: { + nodes: [ + { + id: 'gid://gitlab/ProjectLabel/456', + color: '#333', + title: 'Label title', + description: 'Label description', + }, + ], + }, + milestone: null, + taskCompletionStatus: { + completedCount: 1, + count: 2, + }, + }, + ], + }, + }, + }, +}; + export const locationSearch = [ '?search=find+issues', 'author_username=homer', diff --git a/spec/frontend/runner/components/runner_manual_setup_help_spec.js b/spec/frontend/runner/components/runner_manual_setup_help_spec.js index ca5c88f6e28..add595d784e 100644 --- a/spec/frontend/runner/components/runner_manual_setup_help_spec.js +++ b/spec/frontend/runner/components/runner_manual_setup_help_spec.js @@ -1,8 +1,11 @@ import { GlSprintf } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; +import { nextTick } from 'vue'; import { TEST_HOST } from 'helpers/test_constants'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import RunnerManualSetupHelp from '~/runner/components/runner_manual_setup_help.vue'; +import RunnerRegistrationTokenReset from '~/runner/components/runner_registration_token_reset.vue'; +import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants'; import ClipboardButton from '~/vue_shared/components/clipboard_button.vue'; import RunnerInstructions from '~/vue_shared/components/runner_instructions/runner_instructions.vue'; @@ -14,6 +17,8 @@ 
describe('RunnerManualSetupHelp', () => { let originalGon; const findRunnerInstructions = () => wrapper.findComponent(RunnerInstructions); + const findRunnerRegistrationTokenReset = () => + wrapper.findComponent(RunnerRegistrationTokenReset); const findClipboardButtons = () => wrapper.findAllComponents(ClipboardButton); const findRunnerHelpTitle = () => wrapper.findByTestId('runner-help-title'); const findCoordinatorUrl = () => wrapper.findByTestId('coordinator-url'); @@ -28,6 +33,7 @@ describe('RunnerManualSetupHelp', () => { }, propsData: { registrationToken: mockRegistrationToken, + type: INSTANCE_TYPE, ...props, }, stubs: { @@ -54,16 +60,26 @@ describe('RunnerManualSetupHelp', () => { wrapper.destroy(); }); - it('Title contains the default runner type', () => { + it('Title contains the shared runner type', () => { + createComponent({ props: { type: INSTANCE_TYPE } }); + expect(findRunnerHelpTitle().text()).toMatchInterpolatedText('Set up a shared runner manually'); }); it('Title contains the group runner type', () => { - createComponent({ props: { typeName: 'group' } }); + createComponent({ props: { type: GROUP_TYPE } }); expect(findRunnerHelpTitle().text()).toMatchInterpolatedText('Set up a group runner manually'); }); + it('Title contains the specific runner type', () => { + createComponent({ props: { type: PROJECT_TYPE } }); + + expect(findRunnerHelpTitle().text()).toMatchInterpolatedText( + 'Set up a specific runner manually', + ); + }); + it('Runner Install Page link', () => { expect(findRunnerHelpLink().attributes('href')).toBe(mockRunnerInstallHelpPage); }); @@ -73,12 +89,27 @@ describe('RunnerManualSetupHelp', () => { expect(findClipboardButtons().at(0).props('text')).toBe(TEST_HOST); }); + it('Displays the runner instructions', () => { + expect(findRunnerInstructions().exists()).toBe(true); + }); + it('Displays the registration token', () => { expect(findRegistrationToken().text()).toBe(mockRegistrationToken); 
expect(findClipboardButtons().at(1).props('text')).toBe(mockRegistrationToken); }); - it('Displays the runner instructions', () => { - expect(findRunnerInstructions().exists()).toBe(true); + it('Displays the runner registration token reset button', () => { + expect(findRunnerRegistrationTokenReset().exists()).toBe(true); + }); + + it('Replaces the runner reset button', async () => { + const mockNewRegistrationToken = 'NEW_MOCK_REGISTRATION_TOKEN'; + + findRunnerRegistrationTokenReset().vm.$emit('tokenReset', mockNewRegistrationToken); + + await nextTick(); + + expect(findRegistrationToken().text()).toBe(mockNewRegistrationToken); + expect(findClipboardButtons().at(1).props('text')).toBe(mockNewRegistrationToken); }); }); diff --git a/spec/frontend/runner/components/runner_registration_token_reset_spec.js b/spec/frontend/runner/components/runner_registration_token_reset_spec.js new file mode 100644 index 00000000000..fa5751b380f --- /dev/null +++ b/spec/frontend/runner/components/runner_registration_token_reset_spec.js @@ -0,0 +1,155 @@ +import { GlButton } from '@gitlab/ui'; +import { createLocalVue, shallowMount } from '@vue/test-utils'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import createFlash, { FLASH_TYPES } from '~/flash'; +import RunnerRegistrationTokenReset from '~/runner/components/runner_registration_token_reset.vue'; +import { INSTANCE_TYPE } from '~/runner/constants'; +import runnersRegistrationTokenResetMutation from '~/runner/graphql/runners_registration_token_reset.mutation.graphql'; + +jest.mock('~/flash'); + +const localVue = createLocalVue(); +localVue.use(VueApollo); + +const mockNewToken = 'NEW_TOKEN'; + +describe('RunnerRegistrationTokenReset', () => { + let wrapper; + let runnersRegistrationTokenResetMutationHandler; + + const findButton = () => wrapper.findComponent(GlButton); + + const createComponent = () => { + wrapper = 
shallowMount(RunnerRegistrationTokenReset, { + localVue, + propsData: { + type: INSTANCE_TYPE, + }, + apolloProvider: createMockApollo([ + [runnersRegistrationTokenResetMutation, runnersRegistrationTokenResetMutationHandler], + ]), + }); + }; + + beforeEach(() => { + runnersRegistrationTokenResetMutationHandler = jest.fn().mockResolvedValue({ + data: { + runnersRegistrationTokenReset: { + token: mockNewToken, + errors: [], + }, + }, + }); + + createComponent(); + + jest.spyOn(window, 'confirm'); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('Displays reset button', () => { + expect(findButton().exists()).toBe(true); + }); + + describe('On click and confirmation', () => { + beforeEach(async () => { + window.confirm.mockReturnValueOnce(true); + await findButton().vm.$emit('click'); + }); + + it('resets token', () => { + expect(runnersRegistrationTokenResetMutationHandler).toHaveBeenCalledTimes(1); + expect(runnersRegistrationTokenResetMutationHandler).toHaveBeenCalledWith({ + input: { type: INSTANCE_TYPE }, + }); + }); + + it('emits result', () => { + expect(wrapper.emitted('tokenReset')).toHaveLength(1); + expect(wrapper.emitted('tokenReset')[0]).toEqual([mockNewToken]); + }); + + it('does not show a loading state', () => { + expect(findButton().props('loading')).toBe(false); + }); + + it('shows confirmation', () => { + expect(createFlash).toHaveBeenLastCalledWith({ + message: expect.stringContaining('registration token generated'), + type: FLASH_TYPES.SUCCESS, + }); + }); + }); + + describe('On click without confirmation', () => { + beforeEach(async () => { + window.confirm.mockReturnValueOnce(false); + await findButton().vm.$emit('click'); + }); + + it('does not reset token', () => { + expect(runnersRegistrationTokenResetMutationHandler).not.toHaveBeenCalled(); + }); + + it('does not emit any result', () => { + expect(wrapper.emitted('tokenReset')).toBeUndefined(); + }); + + it('does not show a loading state', () => { + 
expect(findButton().props('loading')).toBe(false); + }); + + it('does not shows confirmation', () => { + expect(createFlash).not.toHaveBeenCalled(); + }); + }); + + describe('On error', () => { + it('On network error, error message is shown', async () => { + runnersRegistrationTokenResetMutationHandler.mockRejectedValueOnce( + new Error('Something went wrong'), + ); + + window.confirm.mockReturnValueOnce(true); + await findButton().vm.$emit('click'); + await waitForPromises(); + + expect(createFlash).toHaveBeenLastCalledWith({ + message: 'Network error: Something went wrong', + }); + }); + + it('On validation error, error message is shown', async () => { + runnersRegistrationTokenResetMutationHandler.mockResolvedValue({ + data: { + runnersRegistrationTokenReset: { + token: null, + errors: ['Token reset failed'], + }, + }, + }); + + window.confirm.mockReturnValueOnce(true); + await findButton().vm.$emit('click'); + await waitForPromises(); + + expect(createFlash).toHaveBeenLastCalledWith({ + message: 'Token reset failed', + }); + }); + }); + + describe('Immediately after click', () => { + it('shows loading state', async () => { + window.confirm.mockReturnValue(true); + await findButton().vm.$emit('click'); + + expect(findButton().props('loading')).toBe(true); + }); + }); +}); diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb index 59b42dfca20..a8a227c8ec4 100644 --- a/spec/helpers/issues_helper_spec.rb +++ b/spec/helpers/issues_helper_spec.rb @@ -302,7 +302,6 @@ RSpec.describe IssuesHelper do email: current_user&.notification_email, emails_help_page_path: help_page_path('development/emails', anchor: 'email-namespace'), empty_state_svg_path: '#', - endpoint: expose_path(api_v4_projects_issues_path(id: project.id)), export_csv_path: export_csv_project_issues_path(project), has_project_issues: project_issues(project).exists?.to_s, import_csv_issues_path: '#', diff --git 
a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb index 3b274f98020..7557b9a118d 100644 --- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb @@ -213,7 +213,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do end context "ewm project" do - let_it_be(:service) { create(:ewm_service, project: project) } + let_it_be(:integration) { create(:ewm_integration, project: project) } + + let(:service) { integration } # TODO: remove when https://gitlab.com/gitlab-org/gitlab/-/issues/330300 is complete before do project.update!(issues_enabled: false) diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb index 2e6df7da232..81fc66c4a11 100644 --- a/spec/lib/gitlab/ci/templates/templates_spec.rb +++ b/spec/lib/gitlab/ci/templates/templates_spec.rb @@ -27,16 +27,17 @@ RSpec.describe 'CI YML Templates' do end context 'that support autodevops' do - non_autodevops_templates = [ - 'Security/DAST-API.gitlab-ci.yml', - 'Security/API-Fuzzing.gitlab-ci.yml' + exceptions = [ + 'Security/DAST.gitlab-ci.yml', # DAST stage is defined inside AutoDevops yml + 'Security/DAST-API.gitlab-ci.yml', # no auto-devops + 'Security/API-Fuzzing.gitlab-ci.yml' # no auto-devops ] context 'when including available templates in a CI YAML configuration' do using RSpec::Parameterized::TableSyntax where(:template_name) do - all_templates - excluded_templates - non_autodevops_templates + all_templates - excluded_templates - exceptions end with_them do diff --git a/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb new file mode 100644 index 00000000000..8aac3ed67c6 --- /dev/null +++ 
b/spec/lib/gitlab/database/migrations/observers/query_details_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::Observers::QueryDetails do + subject { described_class.new } + + let(:observation) { Gitlab::Database::Migrations::Observation.new(migration) } + let(:connection) { ActiveRecord::Base.connection } + let(:query) { "select date_trunc('day', $1::timestamptz) + $2 * (interval '1 hour')" } + let(:query_binds) { [Time.current, 3] } + let(:directory_path) { Dir.mktmpdir } + let(:log_file) { "#{directory_path}/#{migration}-query-details.json" } + let(:query_details) { Gitlab::Json.parse(File.read(log_file)) } + let(:migration) { 20210422152437 } + + before do + stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path) + end + + after do + FileUtils.remove_entry(directory_path) + end + + it 'records details of executed queries' do + observe + + expect(query_details.size).to eq(1) + + log_entry = query_details[0] + start_time, end_time, sql, binds = log_entry.values_at('start_time', 'end_time', 'sql', 'binds') + start_time = DateTime.parse(start_time) + end_time = DateTime.parse(end_time) + + aggregate_failures do + expect(sql).to include(query) + expect(start_time).to be_before(end_time) + expect(binds).to eq(query_binds.map { |b| connection.type_cast(b) }) + end + end + + it 'unsubscribes after the observation' do + observe + + expect(subject).not_to receive(:record_sql_event) + run_query + end + + def observe + subject.before + run_query + subject.after + subject.record(observation) + end + + def run_query + connection.exec_query(query, 'SQL', query_binds) + end +end diff --git a/spec/lib/gitlab/exclusive_lease_spec.rb b/spec/lib/gitlab/exclusive_lease_spec.rb index e730ddd6577..968d26e1c38 100644 --- a/spec/lib/gitlab/exclusive_lease_spec.rb +++ b/spec/lib/gitlab/exclusive_lease_spec.rb @@ -166,4 +166,82 @@ RSpec.describe Gitlab::ExclusiveLease, 
:clean_gitlab_redis_shared_state do expect(described_class.get_uuid(unique_key)).to be_falsey end end + + describe '.throttle' do + it 'prevents repeated execution of the block' do + number = 0 + + action = -> { described_class.throttle(1) { number += 1 } } + + action.call + action.call + + expect(number).to eq 1 + end + + it 'is distinct by block' do + number = 0 + + described_class.throttle(1) { number += 1 } + described_class.throttle(1) { number += 1 } + + expect(number).to eq 2 + end + + it 'is distinct by key' do + number = 0 + + action = ->(k) { described_class.throttle(k) { number += 1 } } + + action.call(:a) + action.call(:b) + action.call(:a) + + expect(number).to eq 2 + end + + it 'allows a group to be passed' do + number = 0 + + described_class.throttle(1, group: :a) { number += 1 } + described_class.throttle(1, group: :b) { number += 1 } + described_class.throttle(1, group: :a) { number += 1 } + described_class.throttle(1, group: :b) { number += 1 } + + expect(number).to eq 2 + end + + it 'defaults to a 60min timeout' do + expect(described_class).to receive(:new).with(anything, hash_including(timeout: 1.hour.to_i)).and_call_original + + described_class.throttle(1) {} + end + + it 'allows count to be specified' do + expect(described_class) + .to receive(:new) + .with(anything, hash_including(timeout: 15.minutes.to_i)) + .and_call_original + + described_class.throttle(1, count: 4) {} + end + + it 'allows period to be specified' do + expect(described_class) + .to receive(:new) + .with(anything, hash_including(timeout: 1.day.to_i)) + .and_call_original + + described_class.throttle(1, period: 1.day) {} + end + + it 'allows period and count to be specified' do + expect(described_class) + .to receive(:new) + .with(anything, hash_including(timeout: 30.minutes.to_i)) + .and_call_original + + described_class.throttle(1, count: 48, period: 1.day) {} + end + end end diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 
96a44575e24..3ee0310a9a2 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -440,6 +440,14 @@ RSpec.describe Gitlab::GitAccess do expect { pull_access_check }.to raise_forbidden("Your password expired. Please access GitLab from a web browser to update your password.") end + it 'allows ldap users with expired password to pull' do + project.add_maintainer(user) + user.update!(password_expires_at: 2.minutes.ago) + allow(user).to receive(:ldap_user?).and_return(true) + + expect { pull_access_check }.not_to raise_error + end + context 'when the project repository does not exist' do before do project.add_guest(user) @@ -979,12 +987,26 @@ RSpec.describe Gitlab::GitAccess do end it 'disallows users with expired password to push' do - project.add_maintainer(user) user.update!(password_expires_at: 2.minutes.ago) expect { push_access_check }.to raise_forbidden("Your password expired. Please access GitLab from a web browser to update your password.") end + it 'allows ldap users with expired password to push' do + user.update!(password_expires_at: 2.minutes.ago) + allow(user).to receive(:ldap_user?).and_return(true) + + expect { push_access_check }.not_to raise_error + end + + it 'disallows blocked ldap users with expired password to push' do + user.block + user.update!(password_expires_at: 2.minutes.ago) + allow(user).to receive(:ldap_user?).and_return(true) + + expect { push_access_check }.to raise_forbidden("Your account has been blocked.") + end + it 'cleans up the files' do expect(project.repository).to receive(:clean_stale_repository_files).and_call_original expect { push_access_check }.not_to raise_error diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 781c55f8d9b..29910f926c7 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -366,7 +366,7 @@ project: - datadog_integration - discord_integration - 
drone_ci_integration -- emails_on_push_service +- emails_on_push_integration - pipelines_email_service - mattermost_slash_commands_service - slack_slash_commands_service @@ -391,8 +391,8 @@ project: - youtrack_service - custom_issue_tracker_integration - bugzilla_integration -- ewm_service -- external_wiki_service +- ewm_integration +- external_wiki_integration - mock_ci_service - mock_monitoring_service - forked_to_members diff --git a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb index 3c9a8913876..230ac01af31 100644 --- a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb +++ b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb @@ -117,4 +117,27 @@ RSpec.describe Gitlab::Pagination::Keyset::Paginator do expect { scope.keyset_paginate }.to raise_error(/does not support keyset pagination/) end end + + context 'when use_union_optimization option is true and ordering by two columns' do + let(:scope) { Project.order(name: :asc, id: :desc) } + + it 'uses UNION queries' do + paginator_first_page = scope.keyset_paginate( + per_page: 2, + keyset_order_options: { use_union_optimization: true } + ) + + paginator_second_page = scope.keyset_paginate( + per_page: 2, + cursor: paginator_first_page.cursor_for_next_page, + keyset_order_options: { use_union_optimization: true } + ) + + expect_next_instances_of(Gitlab::SQL::Union, 1) do |instance| + expect(instance.to_sql).to include(paginator_first_page.records.last.name) + end + + paginator_second_page.records.to_a + end + end end diff --git a/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb b/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb index 19efd2bbd6b..a8f4b039b8c 100644 --- a/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb @@ -19,7 +19,7 @@ RSpec.describe Sidebars::Projects::Menus::ExternalWikiMenu do end context 'when active external issue tracker' do - 
let(:external_wiki) { build(:external_wiki_service, project: project) } + let(:external_wiki) { build(:external_wiki_integration, project: project) } context 'is present' do it 'returns true' do diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb index 3418d7d39ad..4bfa953df40 100644 --- a/spec/models/ability_spec.rb +++ b/spec/models/ability_spec.rb @@ -342,4 +342,45 @@ RSpec.describe Ability do end end end + + describe 'forgetting', :request_store do + it 'allows us to discard specific values from the DeclarativePolicy cache' do + user_a = build_stubbed(:user) + user_b = build_stubbed(:user) + + # expect these keys to remain + Gitlab::SafeRequestStore[:administrator] = :wibble + Gitlab::SafeRequestStore['admin'] = :wobble + described_class.allowed?(user_b, :read_all_resources) + # expect the DeclarativePolicy cache keys added by this action not to remain + described_class.forgetting(/admin/) do + described_class.allowed?(user_a, :read_all_resources) + end + + keys = Gitlab::SafeRequestStore.storage.keys + + expect(keys).to include( + :administrator, + 'admin', + "/dp/condition/BasePolicy/admin/#{user_b.id}" + ) + expect(keys).not_to include("/dp/condition/BasePolicy/admin/#{user_a.id}") + end + + # regression spec for re-entrant admin condition checks + # See: https://gitlab.com/gitlab-org/gitlab/-/issues/332983 + context 'when bypassing the session' do + let(:user) { build_stubbed(:admin) } + let(:ability) { :admin_all_resources } # any admin-only ability is fine here. 
+ + def check_ability + described_class.forgetting(/admin/) { described_class.allowed?(user, ability) } + end + + it 'allows us to have re-entrant evaluation of admin-only permissions' do + expect { Gitlab::Auth::CurrentUserMode.bypass_session!(user.id) } + .to change { check_ability }.from(false).to(true) + end + end + end end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index 72af40e31e0..26fc4b140c1 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -4625,8 +4625,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do end describe '#build_matchers' do - let_it_be(:pipeline) { create(:ci_pipeline) } - let_it_be(:builds) { create_list(:ci_build, 2, pipeline: pipeline, project: pipeline.project) } + let_it_be(:user) { create(:user) } + let_it_be(:pipeline) { create(:ci_pipeline, user: user) } + let_it_be(:builds) { create_list(:ci_build, 2, pipeline: pipeline, project: pipeline.project, user: user) } + + let(:project) { pipeline.project } subject(:matchers) { pipeline.build_matchers } @@ -4635,5 +4638,22 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do expect(matchers).to all be_a(Gitlab::Ci::Matching::BuildMatcher) expect(matchers.first.build_ids).to match_array(builds.map(&:id)) end + + context 'with retried builds' do + let(:retried_build) { builds.first } + + before do + stub_not_protect_default_branch + project.add_developer(user) + + retried_build.cancel! 
+ ::Ci::Build.retry(retried_build, user) + end + + it 'does not include retried builds' do + expect(matchers.size).to eq(1) + expect(matchers.first.build_ids).not_to include(retried_build.id) + end + end end end diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb index 32ec5ed161a..191913ed454 100644 --- a/spec/models/container_expiration_policy_spec.rb +++ b/spec/models/container_expiration_policy_spec.rb @@ -139,15 +139,23 @@ RSpec.describe ContainerExpirationPolicy, type: :model do end end - describe '.with_container_repositories' do - subject { described_class.with_container_repositories } - + context 'policies with container repositories' do let_it_be(:policy1) { create(:container_expiration_policy) } let_it_be(:container_repository1) { create(:container_repository, project: policy1.project) } let_it_be(:policy2) { create(:container_expiration_policy) } let_it_be(:container_repository2) { create(:container_repository, project: policy2.project) } let_it_be(:policy3) { create(:container_expiration_policy) } - it { is_expected.to contain_exactly(policy1, policy2) } + describe '.with_container_repositories' do + subject { described_class.with_container_repositories } + + it { is_expected.to contain_exactly(policy1, policy2) } + end + + describe '.without_container_repositories' do + subject { described_class.without_container_repositories } + + it { is_expected.to contain_exactly(policy3) } + end end end diff --git a/spec/models/integrations/emails_on_push_spec.rb b/spec/models/integrations/emails_on_push_spec.rb index ca060f4155e..c82d4bdff9b 100644 --- a/spec/models/integrations/emails_on_push_spec.rb +++ b/spec/models/integrations/emails_on_push_spec.rb @@ -88,7 +88,7 @@ RSpec.describe Integrations::EmailsOnPush do describe '#execute' do let(:push_data) { { object_kind: 'push' } } let(:project) { create(:project, :repository) } - let(:service) { create(:emails_on_push_service, project: project) } + 
let(:integration) { create(:emails_on_push_integration, project: project) } let(:recipients) { 'test@gitlab.com' } before do @@ -105,7 +105,7 @@ RSpec.describe Integrations::EmailsOnPush do it 'sends email' do expect(EmailsOnPushWorker).not_to receive(:perform_async) - service.execute(push_data) + integration.execute(push_data) end end @@ -119,7 +119,7 @@ RSpec.describe Integrations::EmailsOnPush do it 'does not send email' do expect(EmailsOnPushWorker).not_to receive(:perform_async) - service.execute(push_data) + integration.execute(push_data) end end @@ -128,7 +128,7 @@ RSpec.describe Integrations::EmailsOnPush do expect(project).to receive(:emails_disabled?).and_return(true) expect(EmailsOnPushWorker).not_to receive(:perform_async) - service.execute(push_data) + integration.execute(push_data) end end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 78e32571d7d..cd45a94cd82 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -49,7 +49,7 @@ RSpec.describe Project, factory_default: :keep do it { is_expected.to have_one(:datadog_integration) } it { is_expected.to have_one(:discord_integration) } it { is_expected.to have_one(:drone_ci_integration) } - it { is_expected.to have_one(:emails_on_push_service) } + it { is_expected.to have_one(:emails_on_push_integration) } it { is_expected.to have_one(:pipelines_email_service) } it { is_expected.to have_one(:irker_service) } it { is_expected.to have_one(:pivotaltracker_service) } @@ -65,8 +65,8 @@ RSpec.describe Project, factory_default: :keep do it { is_expected.to have_one(:youtrack_service) } it { is_expected.to have_one(:custom_issue_tracker_integration) } it { is_expected.to have_one(:bugzilla_integration) } - it { is_expected.to have_one(:ewm_service) } - it { is_expected.to have_one(:external_wiki_service) } + it { is_expected.to have_one(:ewm_integration) } + it { is_expected.to have_one(:external_wiki_integration) } it { is_expected.to 
have_one(:confluence_integration) } it { is_expected.to have_one(:project_feature) } it { is_expected.to have_one(:project_repository) } diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index f1c30a646f5..e5c86e69ffc 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -5224,6 +5224,70 @@ RSpec.describe User do end end + describe '#password_expired_if_applicable?' do + let(:user) { build(:user, password_expires_at: password_expires_at) } + + subject { user.password_expired_if_applicable? } + + context 'when user is not ldap user' do + context 'when password_expires_at is not set' do + let(:password_expires_at) {} + + it 'returns false' do + is_expected.to be_falsey + end + end + + context 'when password_expires_at is in the past' do + let(:password_expires_at) { 1.minute.ago } + + it 'returns true' do + is_expected.to be_truthy + end + end + + context 'when password_expires_at is in the future' do + let(:password_expires_at) { 1.minute.from_now } + + it 'returns false' do + is_expected.to be_falsey + end + end + end + + context 'when user is ldap user' do + let(:user) { build(:user, password_expires_at: password_expires_at) } + + before do + allow(user).to receive(:ldap_user?).and_return(true) + end + + context 'when password_expires_at is not set' do + let(:password_expires_at) {} + + it 'returns false' do + is_expected.to be_falsey + end + end + + context 'when password_expires_at is in the past' do + let(:password_expires_at) { 1.minute.ago } + + it 'returns false' do + is_expected.to be_falsey + end + end + + context 'when password_expires_at is in the future' do + let(:password_expires_at) { 1.minute.from_now } + + it 'returns false' do + is_expected.to be_falsey + end + end + end + end + describe '#read_only_attribute?' 
do context 'when synced attributes metadata is present' do it 'delegates to synced_attributes_metadata' do diff --git a/spec/policies/base_policy_spec.rb b/spec/policies/base_policy_spec.rb index 44ff909872d..ec20616d357 100644 --- a/spec/policies/base_policy_spec.rb +++ b/spec/policies/base_policy_spec.rb @@ -22,31 +22,45 @@ RSpec.describe BasePolicy do end end - shared_examples 'admin only access' do |policy| + shared_examples 'admin only access' do |ability| + def policy + # method, because we want a fresh cache each time. + described_class.new(current_user, nil) + end + let(:current_user) { build_stubbed(:user) } - subject { described_class.new(current_user, nil) } + subject { policy } - it { is_expected.not_to be_allowed(policy) } + it { is_expected.not_to be_allowed(ability) } - context 'for admins' do + context 'with an admin' do let(:current_user) { build_stubbed(:admin) } it 'allowed when in admin mode' do enable_admin_mode!(current_user) - is_expected.to be_allowed(policy) + is_expected.to be_allowed(ability) end it 'prevented when not in admin mode' do - is_expected.not_to be_allowed(policy) + is_expected.not_to be_allowed(ability) end end - context 'for anonymous' do + context 'with anonymous' do let(:current_user) { nil } - it { is_expected.not_to be_allowed(policy) } + it { is_expected.not_to be_allowed(ability) } + end + + describe 'bypassing the session for sessionless login', :request_store do + let(:current_user) { build_stubbed(:admin) } + + it 'changes from prevented to allowed' do + expect { Gitlab::Auth::CurrentUserMode.bypass_session!(current_user.id) } + .to change { policy.allowed?(ability) }.from(false).to(true) + end end end diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb index 9e995366c17..e88619b9527 100644 --- a/spec/policies/global_policy_spec.rb +++ b/spec/policies/global_policy_spec.rb @@ -245,6 +245,14 @@ RSpec.describe GlobalPolicy do end it { is_expected.not_to be_allowed(:access_api) } + + 
context 'when user is using ldap' do + before do + allow(current_user).to receive(:ldap_user?).and_return(true) + end + + it { is_expected.to be_allowed(:access_api) } + end end context 'when terms are enforced' do @@ -433,6 +441,14 @@ RSpec.describe GlobalPolicy do end it { is_expected.not_to be_allowed(:access_git) } + + context 'when user is using ldap' do + before do + allow(current_user).to receive(:ldap_user?).and_return(true) + end + + it { is_expected.to be_allowed(:access_git) } + end end end @@ -517,6 +533,14 @@ RSpec.describe GlobalPolicy do end it { is_expected.not_to be_allowed(:use_slash_commands) } + + context 'when user is using ldap' do + before do + allow(current_user).to receive(:ldap_user?).and_return(true) + end + + it { is_expected.to be_allowed(:use_slash_commands) } + end end end diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb index 8e4f808f794..b6bbf8d5dd2 100644 --- a/spec/requests/api/graphql/group_query_spec.rb +++ b/spec/requests/api/graphql/group_query_spec.rb @@ -96,7 +96,7 @@ RSpec.describe 'getting group information' do expect(graphql_data['group']).to be_nil end - it 'avoids N+1 queries' do + it 'avoids N+1 queries', :assume_throttled do pending('See: https://gitlab.com/gitlab-org/gitlab/-/issues/245272') queries = [{ query: group_query(group1) }, diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb index bcede4d37dd..a63116e2b94 100644 --- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb +++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Setting assignees of a merge request' do +RSpec.describe 'Setting assignees of a merge request', :assume_throttled do include GraphqlHelpers let_it_be(:project) { create(:project, :repository) } @@ -68,7 +68,7 @@ 
RSpec.describe 'Setting assignees of a merge request' do context 'when the current user does not have permission to add assignees' do let(:current_user) { create(:user) } - let(:db_query_limit) { 27 } + let(:db_query_limit) { 28 } it 'does not change the assignees' do project.add_guest(current_user) @@ -80,7 +80,7 @@ RSpec.describe 'Setting assignees of a merge request' do end context 'with assignees already assigned' do - let(:db_query_limit) { 39 } + let(:db_query_limit) { 46 } before do merge_request.assignees = [assignee2] @@ -96,7 +96,7 @@ RSpec.describe 'Setting assignees of a merge request' do end context 'when passing an empty list of assignees' do - let(:db_query_limit) { 31 } + let(:db_query_limit) { 32 } let(:input) { { assignee_usernames: [] } } before do @@ -115,7 +115,7 @@ RSpec.describe 'Setting assignees of a merge request' do context 'when passing append as true' do let(:mode) { Types::MutationOperationModeEnum.enum[:append] } let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } } - let(:db_query_limit) { 20 } + let(:db_query_limit) { 22 } before do # In CE, APPEND is a NOOP as you can't have multiple assignees @@ -135,7 +135,7 @@ RSpec.describe 'Setting assignees of a merge request' do end context 'when passing remove as true' do - let(:db_query_limit) { 31 } + let(:db_query_limit) { 32 } let(:mode) { Types::MutationOperationModeEnum.enum[:remove] } let(:input) { { assignee_usernames: [assignee.username], operation_mode: mode } } let(:expected_result) { [] } diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb index dac139064da..972b21ad2e0 100644 --- a/spec/requests/api/import_bitbucket_server_spec.rb +++ b/spec/requests/api/import_bitbucket_server_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe API::ImportBitbucketServer do let(:base_uri) { "https://test:7990" } - let(:user) { create(:user) } + let(:user) { create(:user, bio: 'test') } 
let(:token) { "asdasd12345" } let(:secret) { "sekrettt" } let(:project_key) { 'TES' } diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb index 6b1aa576167..8efb822cb83 100644 --- a/spec/requests/api/protected_branches_spec.rb +++ b/spec/requests/api/protected_branches_spec.rb @@ -228,7 +228,7 @@ RSpec.describe API::ProtectedBranches do context 'when a policy restricts rule deletion' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) end @@ -278,7 +278,7 @@ RSpec.describe API::ProtectedBranches do context 'when a policy restricts rule deletion' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) end diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb index 8701efcd65f..6c14c174748 100644 --- a/spec/requests/api/services_spec.rb +++ b/spec/requests/api/services_spec.rb @@ -25,8 +25,8 @@ RSpec.describe API::Services do end context 'project with services' do - let!(:active_service) { create(:emails_on_push_service, project: project, active: true) } - let!(:service) { create(:custom_issue_tracker_integration, project: project, active: false) } + let!(:active_integration) { create(:emails_on_push_integration, project: project, active: true) } + let!(:integration) { create(:custom_issue_tracker_integration, project: project, active: false) } it "returns a list of all active services" do get api("/projects/#{project.id}/services", user) diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index 7cf46f6adc6..ec55810b4ad 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -36,16 +36,6 @@ RSpec.describe 'Git HTTP requests' do 
end end - context "when password is expired" do - it "responds to downloads with status 401 Unauthorized" do - user.update!(password_expires_at: 2.days.ago) - - download(path, user: user.username, password: user.password) do |response| - expect(response).to have_gitlab_http_status(:unauthorized) - end - end - end - context "when user is blocked" do let(:user) { create(:user, :blocked) } @@ -68,6 +58,26 @@ RSpec.describe 'Git HTTP requests' do end end + shared_examples 'operations are not allowed with expired password' do + context "when password is expired" do + it "responds to downloads with status 401 Unauthorized" do + user.update!(password_expires_at: 2.days.ago) + + download(path, user: user.username, password: user.password) do |response| + expect(response).to have_gitlab_http_status(:unauthorized) + end + end + + it "responds to uploads with status 401 Unauthorized" do + user.update!(password_expires_at: 2.days.ago) + + upload(path, user: user.username, password: user.password) do |response| + expect(response).to have_gitlab_http_status(:unauthorized) + end + end + end + end + shared_examples 'pushes require Basic HTTP Authentication' do context "when no credentials are provided" do it "responds to uploads with status 401 Unauthorized (no project existence information leak)" do @@ -95,15 +105,6 @@ RSpec.describe 'Git HTTP requests' do expect(response.header['WWW-Authenticate']).to start_with('Basic ') end end - - context "when password is expired" do - it "responds to uploads with status 401 Unauthorized" do - user.update!(password_expires_at: 2.days.ago) - upload(path, user: user.username, password: user.password) do |response| - expect(response).to have_gitlab_http_status(:unauthorized) - end - end - end end context "when authentication succeeds" do @@ -212,6 +213,7 @@ RSpec.describe 'Git HTTP requests' do it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication' + it_behaves_like 'operations are 
not allowed with expired password' context 'when authenticated' do it 'rejects downloads and uploads with 404 Not Found' do @@ -306,6 +308,7 @@ RSpec.describe 'Git HTTP requests' do it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication' + it_behaves_like 'operations are not allowed with expired password' context 'when authenticated' do context 'and as a developer on the team' do @@ -473,6 +476,7 @@ RSpec.describe 'Git HTTP requests' do it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication' + it_behaves_like 'operations are not allowed with expired password' end context 'but the repo is enabled' do @@ -488,6 +492,7 @@ RSpec.describe 'Git HTTP requests' do it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication' + it_behaves_like 'operations are not allowed with expired password' end end @@ -508,6 +513,7 @@ RSpec.describe 'Git HTTP requests' do it_behaves_like 'pulls require Basic HTTP Authentication' it_behaves_like 'pushes require Basic HTTP Authentication' + it_behaves_like 'operations are not allowed with expired password' context "when username and password are provided" do let(:env) { { user: user.username, password: 'nope' } } @@ -1003,6 +1009,24 @@ RSpec.describe 'Git HTTP requests' do it_behaves_like 'pulls are allowed' it_behaves_like 'pushes are allowed' + + context "when password is expired" do + it "responds to downloads with status 200" do + user.update!(password_expires_at: 2.days.ago) + + download(path, user: user.username, password: user.password) do |response| + expect(response).to have_gitlab_http_status(:ok) + end + end + + it "responds to uploads with status 200" do + user.update!(password_expires_at: 2.days.ago) + + upload(path, user: user.username, password: user.password) do |response| + expect(response).to have_gitlab_http_status(:ok) + end + end + end end end 
end diff --git a/spec/serializers/service_event_entity_spec.rb b/spec/serializers/service_event_entity_spec.rb index 64baa57fd6d..91254c7dd27 100644 --- a/spec/serializers/service_event_entity_spec.rb +++ b/spec/serializers/service_event_entity_spec.rb @@ -5,15 +5,15 @@ require 'spec_helper' RSpec.describe ServiceEventEntity do let(:request) { double('request') } - subject { described_class.new(event, request: request, service: service).as_json } + subject { described_class.new(event, request: request, service: integration).as_json } before do - allow(request).to receive(:service).and_return(service) + allow(request).to receive(:service).and_return(integration) end describe '#as_json' do context 'service without fields' do - let(:service) { create(:emails_on_push_service, push_events: true) } + let(:integration) { create(:emails_on_push_integration, push_events: true) } let(:event) { 'push' } it 'exposes correct attributes' do @@ -25,7 +25,7 @@ RSpec.describe ServiceEventEntity do end context 'service with fields' do - let(:service) { create(:slack_service, note_events: false, note_channel: 'note-channel') } + let(:integration) { create(:slack_service, note_events: false, note_channel: 'note-channel') } let(:event) { 'note' } it 'exposes correct attributes' do diff --git a/spec/serializers/service_field_entity_spec.rb b/spec/serializers/service_field_entity_spec.rb index 007042e1087..20ca98416f8 100644 --- a/spec/serializers/service_field_entity_spec.rb +++ b/spec/serializers/service_field_entity_spec.rb @@ -55,10 +55,11 @@ RSpec.describe ServiceFieldEntity do end context 'EmailsOnPush Service' do - let(:service) { create(:emails_on_push_service, send_from_committer_email: '1') } + let(:integration) { create(:emails_on_push_integration, send_from_committer_email: '1') } + let(:service) { integration } # TODO: remove when https://gitlab.com/gitlab-org/gitlab/-/issues/330300 is complete context 'field with type checkbox' do - let(:field) { service.global_fields.find 
{ |field| field[:name] == 'send_from_committer_email' } } + let(:field) { integration.global_fields.find { |field| field[:name] == 'send_from_committer_email' } } it 'exposes correct attributes and casts value to Boolean' do expected_hash = { @@ -77,7 +78,7 @@ RSpec.describe ServiceFieldEntity do end context 'field with type select' do - let(:field) { service.global_fields.find { |field| field[:name] == 'branches_to_be_notified' } } + let(:field) { integration.global_fields.find { |field| field[:name] == 'branches_to_be_notified' } } it 'exposes correct attributes' do expected_hash = { diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb index 986322e4d87..45462831a31 100644 --- a/spec/services/protected_branches/create_service_spec.rb +++ b/spec/services/protected_branches/create_service_spec.rb @@ -40,7 +40,7 @@ RSpec.describe ProtectedBranches::CreateService do context 'when a policy restricts rule creation' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) end diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb index 98d31147754..47a048e7033 100644 --- a/spec/services/protected_branches/destroy_service_spec.rb +++ b/spec/services/protected_branches/destroy_service_spec.rb @@ -18,7 +18,7 @@ RSpec.describe ProtectedBranches::DestroyService do context 'when a policy restricts rule deletion' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) end diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb index fdfbdf2e6ae..88e58ad5907 
100644 --- a/spec/services/protected_branches/update_service_spec.rb +++ b/spec/services/protected_branches/update_service_spec.rb @@ -27,7 +27,7 @@ RSpec.describe ProtectedBranches::UpdateService do context 'when a policy restricts rule creation' do before do - policy = instance_double(ProtectedBranchPolicy, can?: false) + policy = instance_double(ProtectedBranchPolicy, allowed?: false) expect(ProtectedBranchPolicy).to receive(:new).and_return(policy) end diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb index 02d60f076ca..9a5b0f33fbb 100644 --- a/spec/services/repositories/changelog_service_spec.rb +++ b/spec/services/repositories/changelog_service_spec.rb @@ -76,7 +76,7 @@ RSpec.describe Repositories::ChangelogService do recorder = ActiveRecord::QueryRecorder.new { service.execute } changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data - expect(recorder.count).to eq(11) + expect(recorder.count).to eq(12) expect(changelog).to include('Title 1', 'Title 2') end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 9a2eee0edc5..31ff619232c 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -346,6 +346,15 @@ RSpec.configure do |config| Gitlab::WithRequestStore.with_request_store { example.run } end + # previous test runs may have left some resources throttled + config.before do + ::Gitlab::ExclusiveLease.reset_all!("el:throttle:*") + end + + config.before(:example, :assume_throttled) do |example| + allow(::Gitlab::ExclusiveLease).to receive(:throttle).and_return(nil) + end + config.before(:example, :request_store) do # Clear request store before actually starting the spec (the # `around` above will have the request store enabled for all diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb index c775574091e..a1aa7c04b67 100644 --- 
a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb +++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb @@ -1061,7 +1061,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do let(:service_status) { true } before do - project.create_external_wiki_service(active: service_status, properties: properties) + project.create_external_wiki_integration(active: service_status, properties: properties) project.reload end diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb index 8562935b0b5..6f81d06f653 100644 --- a/spec/workers/container_expiration_policy_worker_spec.rb +++ b/spec/workers/container_expiration_policy_worker_spec.rb @@ -123,5 +123,19 @@ RSpec.describe ContainerExpirationPolicyWorker do expect(stuck_cleanup.reload).to be_cleanup_unfinished end end + + context 'policies without container repositories' do + let_it_be(:container_expiration_policy1) { create(:container_expiration_policy, enabled: true) } + let_it_be(:container_repository1) { create(:container_repository, project_id: container_expiration_policy1.project_id) } + let_it_be(:container_expiration_policy2) { create(:container_expiration_policy, enabled: true) } + let_it_be(:container_repository2) { create(:container_repository, project_id: container_expiration_policy2.project_id) } + let_it_be(:container_expiration_policy3) { create(:container_expiration_policy, enabled: true) } + + it 'disables them' do + expect { subject } + .to change { ::ContainerExpirationPolicy.active.count }.from(3).to(2) + expect(container_expiration_policy3.reload.enabled).to be false + end + end end end diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb index 548cf4c717a..a86964aa417 100644 --- a/spec/workers/web_hook_worker_spec.rb +++ b/spec/workers/web_hook_worker_spec.rb @@ -17,7 +17,6 @@ RSpec.describe WebHookWorker do it_behaves_like 'worker with data consistency', described_class, - 
feature_flag: :load_balancing_for_web_hook_worker, data_consistency: :delayed end end |