-rw-r--r--  .gitlab-ci.yml | 2
-rw-r--r--  .gitlab/ci/notify.gitlab-ci.yml | 2
-rw-r--r--  .rubocop_todo/rspec/invalid_feature_category.yml | 1
-rw-r--r--  .rubocop_todo/rspec/missing_feature_category.yml | 1
-rw-r--r--  GITALY_SERVER_VERSION | 2
-rw-r--r--  app/assets/javascripts/branches/branch_sort_dropdown.js | 6
-rw-r--r--  app/assets/javascripts/branches/components/sort_dropdown.vue | 35
-rw-r--r--  app/assets/javascripts/design_management/components/design_overlay.vue | 3
-rw-r--r--  app/assets/javascripts/design_management/graphql.js | 2
-rw-r--r--  app/assets/javascripts/pipelines/pipeline_details_bundle.js | 7
-rw-r--r--  app/assets/javascripts/vue_shared/components/web_ide_link.vue | 1
-rw-r--r--  app/controllers/projects/branches_controller.rb | 16
-rw-r--r--  app/finders/todos_finder.rb | 2
-rw-r--r--  app/graphql/mutations/issues/bulk_update.rb | 84
-rw-r--r--  app/graphql/resolvers/group_releases_resolver.rb | 30
-rw-r--r--  app/graphql/resolvers/releases_resolver.rb | 2
-rw-r--r--  app/graphql/types/group_release_sort_enum.rb | 15
-rw-r--r--  app/graphql/types/group_type.rb | 6
-rw-r--r--  app/graphql/types/mutation_type.rb | 1
-rw-r--r--  app/helpers/sorting_titles_values_helper.rb | 4
-rw-r--r--  app/helpers/tab_helper.rb | 2
-rw-r--r--  app/models/concerns/issue_parent.rb | 11
-rw-r--r--  app/models/group.rb | 1
-rw-r--r--  app/models/namespaces/traversal/linear.rb | 53
-rw-r--r--  app/models/project.rb | 1
-rw-r--r--  app/models/work_item.rb | 4
-rw-r--r--  app/models/work_items/type.rb | 4
-rw-r--r--  app/serializers/build_details_entity.rb | 2
-rw-r--r--  app/services/issuable/bulk_update_service.rb | 29
-rw-r--r--  app/services/members/approve_access_request_service.rb | 6
-rw-r--r--  app/services/members/base_service.rb | 4
-rw-r--r--  app/services/members/destroy_service.rb | 1
-rw-r--r--  app/services/preview_markdown_service.rb | 2
-rw-r--r--  app/services/quick_actions/target_service.rb | 8
-rw-r--r--  app/services/todo_service.rb | 17
-rw-r--r--  app/views/admin/broadcast_messages/edit.html.haml | 3
-rw-r--r--  app/views/projects/branches/index.html.haml | 12
-rw-r--r--  config/feature_flags/development/bulk_update_issues_mutation.yml | 8
-rw-r--r--  config/feature_flags/development/set_traversal_ids_on_save.yml | 8
-rw-r--r--  db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb | 2
-rw-r--r--  doc/api/draft_notes.md | 7
-rw-r--r--  doc/api/graphql/reference/index.md | 60
-rw-r--r--  doc/ci/environments/deployment_approvals.md | 3
-rw-r--r--  doc/ci/environments/index.md | 24
-rw-r--r--  doc/ci/environments/protected_environments.md | 2
-rw-r--r--  doc/development/architecture.md | 2
-rw-r--r--  doc/development/code_review.md | 17
-rw-r--r--  doc/development/feature_categorization/index.md | 5
-rw-r--r--  doc/development/merge_request_concepts/performance.md | 23
-rw-r--r--  doc/user/project/merge_requests/conflicts.md | 6
-rw-r--r--  doc/user/todos.md | 1
-rw-r--r--  lib/api/helpers/projects_helpers.rb | 2
-rw-r--r--  lib/gitlab/quick_actions/command_definition.rb | 8
-rw-r--r--  locale/gitlab.pot | 6
-rw-r--r--  rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction.rb | 86
-rw-r--r--  rubocop/cop/rspec/invalid_feature_category.rb | 4
-rw-r--r--  spec/controllers/projects/autocomplete_sources_controller_spec.rb | 33
-rw-r--r--  spec/features/projects/pipelines/pipeline_spec.rb | 17
-rw-r--r--  spec/finders/todos_finder_spec.rb | 4
-rw-r--r--  spec/frontend/add_context_commits_modal/store/actions_spec.js | 6
-rw-r--r--  spec/frontend/api/alert_management_alerts_api_spec.js | 10
-rw-r--r--  spec/frontend/branches/components/sort_dropdown_spec.js | 8
-rw-r--r--  spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js | 2
-rw-r--r--  spec/frontend/ci/reports/codequality_report/store/actions_spec.js | 4
-rw-r--r--  spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js | 6
-rw-r--r--  spec/frontend/confidential_merge_request/components/project_form_group_spec.js | 3
-rw-r--r--  spec/frontend/contributors/component/contributors_spec.js | 3
-rw-r--r--  spec/frontend/contributors/store/actions_spec.js | 4
-rw-r--r--  spec/frontend/deploy_keys/components/app_spec.js | 5
-rw-r--r--  spec/frontend/design_management/components/design_overlay_spec.js | 305
-rw-r--r--  spec/frontend/diffs/components/app_spec.js | 3
-rw-r--r--  spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js | 10
-rw-r--r--  spec/frontend/emoji/awards_app/store/actions_spec.js | 6
-rw-r--r--  spec/frontend/error_tracking/store/actions_spec.js | 4
-rw-r--r--  spec/frontend/error_tracking/store/details/actions_spec.js | 8
-rw-r--r--  spec/frontend/feature_flags/components/new_environments_dropdown_spec.js | 2
-rw-r--r--  spec/frontend/ide/components/repo_editor_spec.js | 3
-rw-r--r--  spec/frontend/ide/stores/modules/commit/actions_spec.js | 3
-rw-r--r--  spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js | 4
-rw-r--r--  spec/frontend/import_entities/import_groups/services/status_poller_spec.js | 3
-rw-r--r--  spec/frontend/issues/issue_spec.js | 3
-rw-r--r--  spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js | 3
-rw-r--r--  spec/frontend/labels/components/promote_label_modal_spec.js | 4
-rw-r--r--  spec/frontend/lib/utils/icon_utils_spec.js | 6
-rw-r--r--  spec/frontend/lib/utils/poll_spec.js | 20
-rw-r--r--  spec/frontend/lib/utils/select2_utils_spec.js | 3
-rw-r--r--  spec/frontend/merge_requests/components/compare_dropdown_spec.js | 3
-rw-r--r--  spec/frontend/mr_notes/stores/actions_spec.js | 4
-rw-r--r--  spec/frontend/notes/components/discussion_filter_spec.js | 3
-rw-r--r--  spec/frontend/notes/components/note_awards_list_spec.js | 3
-rw-r--r--  spec/frontend/notes/components/notes_app_spec.js | 3
-rw-r--r--  spec/frontend/pages/import/history/components/import_error_details_spec.js | 5
-rw-r--r--  spec/frontend/pipelines/graph/graph_component_wrapper_spec.js | 3
-rw-r--r--  spec/frontend/projects/commit_box/info/load_branches_spec.js | 6
-rw-r--r--  spec/frontend/prometheus_metrics/custom_metrics_spec.js | 3
-rw-r--r--  spec/frontend/prometheus_metrics/prometheus_metrics_spec.js | 3
-rw-r--r--  spec/frontend/releases/components/app_edit_new_spec.js | 3
-rw-r--r--  spec/frontend/repository/log_tree_spec.js | 3
-rw-r--r--  spec/frontend/snippets/components/snippet_blob_edit_spec.js | 6
-rw-r--r--  spec/frontend/users_select/test_helper.js | 3
-rw-r--r--  spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js | 6
-rw-r--r--  spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js | 3
-rw-r--r--  spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js | 3
-rw-r--r--  spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js | 25
-rw-r--r--  spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js | 16
-rw-r--r--  spec/frontend/vue_merge_request_widget/test_extensions.js | 3
-rw-r--r--  spec/frontend/vue_shared/components/entity_select/group_select_spec.js | 6
-rw-r--r--  spec/frontend/vue_shared/components/entity_select/project_select_spec.js | 2
-rw-r--r--  spec/frontend/vue_shared/components/notes/system_note_spec.js | 3
-rw-r--r--  spec/frontend/vue_shared/components/web_ide_link_spec.js | 13
-rw-r--r--  spec/frontend/work_items/components/notes/system_note_spec.js | 3
-rw-r--r--  spec/graphql/resolvers/group_releases_resolver_spec.rb | 47
-rw-r--r--  spec/graphql/resolvers/releases_resolver_spec.rb | 64
-rw-r--r--  spec/graphql/types/group_type_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/job_waiter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/quick_actions/command_definition_spec.rb | 29
-rw-r--r--  spec/models/group_spec.rb | 15
-rw-r--r--  spec/models/namespace_spec.rb | 114
-rw-r--r--  spec/models/work_item_spec.rb | 12
-rw-r--r--  spec/models/work_items/type_spec.rb | 20
-rw-r--r--  spec/requests/api/graphql/group/group_releases_spec.rb | 139
-rw-r--r--  spec/requests/api/graphql/mutations/issues/bulk_update_spec.rb | 165
-rw-r--r--  spec/requests/api/graphql/project/releases_spec.rb | 224
-rw-r--r--  spec/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction_spec.rb | 67
-rw-r--r--  spec/rubocop/cop/rspec/invalid_feature_category_spec.rb | 1
-rw-r--r--  spec/services/issuable/bulk_update_service_spec.rb | 81
-rw-r--r--  spec/services/members/approve_access_request_service_spec.rb | 8
-rw-r--r--  spec/services/members/base_service_spec.rb | 19
-rw-r--r--  spec/services/members/destroy_service_spec.rb | 8
-rw-r--r--  spec/services/preview_markdown_service_spec.rb | 17
-rw-r--r--  spec/services/quick_actions/target_service_spec.rb | 8
-rw-r--r--  spec/services/todo_service_spec.rb | 18
-rw-r--r--  spec/support/shared_contexts/requests/api/graphql/releases_and_group_releases_shared_context.rb | 60
-rw-r--r--  spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb | 41
-rw-r--r--  spec/support/shared_examples/namespaces/traversal_examples.rb | 5
-rw-r--r--  spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb | 164
-rwxr-xr-x  tooling/bin/js_to_system_specs_mappings | 10
137 files changed, 1914 insertions, 718 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 591c83f0cff..6947c6db23b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -88,7 +88,7 @@ workflow:
<<: *ruby3-variables
<<: *default-branch-incident-variables
GITLAB_DEPENDENCY_PROXY_ADDRESS: ""
- PIPELINE_NAME: 'Ruby 3 $CI_COMMIT_BRANCH branch pipeline (trigerred by a project token)'
+ PIPELINE_NAME: 'Ruby 3 $CI_COMMIT_BRANCH branch pipeline (triggered by a project token)'
# For `$CI_DEFAULT_BRANCH` branch, create a pipeline (this includes on schedules, pushes, merges, etc.).
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
variables:
diff --git a/.gitlab/ci/notify.gitlab-ci.yml b/.gitlab/ci/notify.gitlab-ci.yml
index bdd7979126f..638e1cd8bd8 100644
--- a/.gitlab/ci/notify.gitlab-ci.yml
+++ b/.gitlab/ci/notify.gitlab-ci.yml
@@ -43,7 +43,7 @@ notify-pipeline-failure:
- .notify:rules:notify-pipeline-failure
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
variables:
- INCIDENT_PROJECT: "#{BROKEN_BRANCH_INCIDENTS_PROJECT}"
+ INCIDENT_PROJECT: "${BROKEN_BRANCH_INCIDENTS_PROJECT}"
BROKEN_BRANCH_PROJECT_TOKEN: "${BROKEN_BRANCH_INCIDENTS_PROJECT_TOKEN}"
INCIDENT_JSON: "${CI_PROJECT_DIR}/incident.json"
SLACK_CHANNEL: "${NOTIFY_PIPELINE_FAILURE_CHANNEL}"
diff --git a/.rubocop_todo/rspec/invalid_feature_category.yml b/.rubocop_todo/rspec/invalid_feature_category.yml
index fb5e2299337..469c0e2a8a8 100644
--- a/.rubocop_todo/rspec/invalid_feature_category.yml
+++ b/.rubocop_todo/rspec/invalid_feature_category.yml
@@ -88,7 +88,6 @@ RSpec/InvalidFeatureCategory:
- 'spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb'
- 'spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb'
- 'spec/lib/gitlab/import_export/version_checker_spec.rb'
- - 'spec/lib/gitlab/job_waiter_spec.rb'
- 'spec/lib/gitlab/metrics/background_transaction_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb'
diff --git a/.rubocop_todo/rspec/missing_feature_category.yml b/.rubocop_todo/rspec/missing_feature_category.yml
index 3cc6a613303..c38fc19d762 100644
--- a/.rubocop_todo/rspec/missing_feature_category.yml
+++ b/.rubocop_todo/rspec/missing_feature_category.yml
@@ -6973,6 +6973,7 @@ RSpec/MissingFeatureCategory:
- 'spec/rubocop/cop/migration/migration_record_spec.rb'
- 'spec/rubocop/cop/migration/prevent_global_enable_lock_retries_with_disable_ddl_transaction_spec.rb'
- 'spec/rubocop/cop/migration/prevent_index_creation_spec.rb'
+ - 'spec/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction_spec.rb'
- 'spec/rubocop/cop/migration/prevent_strings_spec.rb'
- 'spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb'
- 'spec/rubocop/cop/migration/remove_column_spec.rb'
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index ed96b8bb646..3dc57cea50f 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-492447cb10b95301aea0e6410b09b11b5ea61a6b
+ac201ee33b2c7b2974945fb15ba9b1aec4794017
diff --git a/app/assets/javascripts/branches/branch_sort_dropdown.js b/app/assets/javascripts/branches/branch_sort_dropdown.js
index 9914ce05a95..9ea1331d563 100644
--- a/app/assets/javascripts/branches/branch_sort_dropdown.js
+++ b/app/assets/javascripts/branches/branch_sort_dropdown.js
@@ -1,8 +1,9 @@
import Vue from 'vue';
+import { parseBoolean } from '~/lib/utils/common_utils';
import SortDropdown from './components/sort_dropdown.vue';
const mountDropdownApp = (el) => {
- const { mode, projectBranchesFilteredPath, sortOptions } = el.dataset;
+ const { projectBranchesFilteredPath, sortOptions, showDropdown, sortedBy } = el.dataset;
return new Vue({
el,
@@ -11,9 +12,10 @@ const mountDropdownApp = (el) => {
SortDropdown,
},
provide: {
- mode,
projectBranchesFilteredPath,
sortOptions: JSON.parse(sortOptions),
+ showDropdown: parseBoolean(showDropdown),
+ sortedBy,
},
render: (createElement) => createElement(SortDropdown),
});
diff --git a/app/assets/javascripts/branches/components/sort_dropdown.vue b/app/assets/javascripts/branches/components/sort_dropdown.vue
index 263efcaa788..99c82fc9a5a 100644
--- a/app/assets/javascripts/branches/components/sort_dropdown.vue
+++ b/app/assets/javascripts/branches/components/sort_dropdown.vue
@@ -1,10 +1,8 @@
<script>
import { GlCollapsibleListbox, GlSearchBoxByClick } from '@gitlab/ui';
-import { mergeUrlParams, visitUrl, getParameterValues } from '~/lib/utils/url_utility';
+import { mergeUrlParams, visitUrl } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
-const OVERVIEW_MODE = 'overview';
-
export default {
i18n: {
searchPlaceholder: s__('Branches|Filter by branch name'),
@@ -13,17 +11,20 @@ export default {
GlCollapsibleListbox,
GlSearchBoxByClick,
},
- inject: ['projectBranchesFilteredPath', 'sortOptions', 'mode'],
+ // external parameters
+ inject: [
+ 'projectBranchesFilteredPath',
+ 'sortOptions', // dropdown choices (value, text) pairs
+ 'showDropdown', // if not set, only text filter is shown
+ 'sortedBy', // (required) value of choice to sort by
+ ],
+ // component's own state; selectedKey is also assigned in created()
data() {
return {
- selectedKey: 'updated_desc',
searchTerm: '',
};
},
computed: {
- shouldShowDropdown() {
- return this.mode !== OVERVIEW_MODE;
- },
selectedSortMethodName() {
return this.sortOptions[this.selectedKey];
},
@@ -31,26 +32,16 @@ export default {
return Object.entries(this.sortOptions).map(([value, text]) => ({ value, text }));
},
},
+ // constructor or initialization function
created() {
- const sortValue = getParameterValues('sort');
- const searchValue = getParameterValues('search');
-
- if (sortValue.length > 0) {
- [this.selectedKey] = sortValue;
- }
-
- if (searchValue.length > 0) {
- [this.searchTerm] = searchValue;
- }
+ this.selectedKey = this.sortedBy;
},
methods: {
visitUrlFromOption(sortKey) {
this.selectedKey = sortKey;
const urlParams = {};
- if (this.mode !== OVERVIEW_MODE) {
- urlParams.sort = sortKey;
- }
+ urlParams.sort = sortKey;
urlParams.search = this.searchTerm.length > 0 ? this.searchTerm : null;
@@ -71,7 +62,7 @@ export default {
/>
<gl-collapsible-listbox
- v-if="shouldShowDropdown"
+ v-if="showDropdown"
v-model="selectedKey"
:items="listboxItems"
:toggle-text="selectedSortMethodName"
diff --git a/app/assets/javascripts/design_management/components/design_overlay.vue b/app/assets/javascripts/design_management/components/design_overlay.vue
index 674415ec449..4ce6395140e 100644
--- a/app/assets/javascripts/design_management/components/design_overlay.vue
+++ b/app/assets/javascripts/design_management/components/design_overlay.vue
@@ -262,6 +262,7 @@ export default {
<div
class="gl-absolute gl-top-0 gl-left-0 frame"
:style="overlayStyle"
+ data-testid="design-overlay"
@mousemove="onOverlayMousemove"
@mouseleave="onNoteMouseup"
>
@@ -287,6 +288,7 @@ export default {
:is-inactive="isNoteInactive(note)"
:is-resolved="note.resolved"
is-on-image
+ data-testid="note-pin"
@mousedown.stop="onNoteMousedown($event, note)"
@mouseup.stop="onNoteMouseup(note)"
/>
@@ -294,6 +296,7 @@ export default {
<design-note-pin
v-if="currentCommentForm"
:position="currentCommentPositionStyle"
+ data-testid="comment-badge"
@mousedown.stop="onNoteMousedown"
@mouseup.stop="onNoteMouseup"
/>
diff --git a/app/assets/javascripts/design_management/graphql.js b/app/assets/javascripts/design_management/graphql.js
index 8c44c5a5d0a..cef2d5e1a18 100644
--- a/app/assets/javascripts/design_management/graphql.js
+++ b/app/assets/javascripts/design_management/graphql.js
@@ -14,7 +14,7 @@ import { CREATE_DESIGN_TODO_EXISTS_ERROR } from './utils/error_messages';
Vue.use(VueApollo);
-const resolvers = {
+export const resolvers = {
Mutation: {
updateActiveDiscussion: (_, { id = null, source }, { cache }) => {
const sourceData = cache.readQuery({ query: activeDiscussionQuery });
diff --git a/app/assets/javascripts/pipelines/pipeline_details_bundle.js b/app/assets/javascripts/pipelines/pipeline_details_bundle.js
index f00378733fc..ba51347ad69 100644
--- a/app/assets/javascripts/pipelines/pipeline_details_bundle.js
+++ b/app/assets/javascripts/pipelines/pipeline_details_bundle.js
@@ -1,6 +1,7 @@
import VueRouter from 'vue-router';
import { createAlert } from '~/flash';
import { __ } from '~/locale';
+import { pipelineTabName } from './constants';
import { createPipelineHeaderApp } from './pipeline_details_header';
import { apolloProvider } from './pipeline_shared_client';
@@ -38,6 +39,12 @@ export default async function initPipelineDetailsBundle() {
routes,
});
+ // We handle the shortcut `pipelines/latest` by forwarding the user to the pipeline graph
+ // tab and changing the route to the correct `pipelines/:id`
+ if (window.location.pathname.endsWith('latest')) {
+ router.replace({ name: pipelineTabName });
+ }
+
try {
const appOptions = createAppOptions(SELECTORS.PIPELINE_TABS, apolloProvider, router);
createPipelineTabs(appOptions);
diff --git a/app/assets/javascripts/vue_shared/components/web_ide_link.vue b/app/assets/javascripts/vue_shared/components/web_ide_link.vue
index 98630512308..28bec63b244 100644
--- a/app/assets/javascripts/vue_shared/components/web_ide_link.vue
+++ b/app/assets/javascripts/vue_shared/components/web_ide_link.vue
@@ -423,6 +423,7 @@ export default {
target="_blank"
:href="webIdeUrl"
block
+ @click="dismissCalloutOnActionClicked(dismiss)"
>
{{ __('Try it out now') }}
</gl-link>
diff --git a/app/controllers/projects/branches_controller.rb b/app/controllers/projects/branches_controller.rb
index 7b01e4db42a..8ea9d23f1ea 100644
--- a/app/controllers/projects/branches_controller.rb
+++ b/app/controllers/projects/branches_controller.rb
@@ -20,7 +20,7 @@ class Projects::BranchesController < Projects::ApplicationController
respond_to do |format|
format.html do
@mode = params[:state].presence || 'overview'
- @sort = sort_value_for_mode
+ @sort = sort_param || default_sort
@overview_max_branches = 5
# Fetch branches for the specified mode
@@ -128,11 +128,7 @@ class Projects::BranchesController < Projects::ApplicationController
private
- def sort_value_for_mode
- custom_sort || default_sort
- end
-
- def custom_sort
+ def sort_param
sort = params[:sort].presence
unless sort.in?(supported_sort_options)
@@ -144,11 +140,11 @@ class Projects::BranchesController < Projects::ApplicationController
end
def default_sort
- 'stale' == @mode ? sort_value_oldest_updated : sort_value_recently_updated
+ 'stale' == @mode ? SORT_UPDATED_OLDEST : SORT_UPDATED_RECENT
end
def supported_sort_options
- [nil, sort_value_name, sort_value_oldest_updated, sort_value_recently_updated]
+ [nil, SORT_NAME, SORT_UPDATED_OLDEST, SORT_UPDATED_RECENT]
end
# It can be expensive to calculate the diverging counts for each
@@ -206,10 +202,10 @@ class Projects::BranchesController < Projects::ApplicationController
limit = @overview_max_branches + 1
@active_branches =
- BranchesFinder.new(@repository, { per_page: limit, sort: sort_value_recently_updated })
+ BranchesFinder.new(@repository, { per_page: limit, sort: SORT_UPDATED_RECENT })
.execute(gitaly_pagination: true).select(&:active?)
@stale_branches =
- BranchesFinder.new(@repository, { per_page: limit, sort: sort_value_oldest_updated })
+ BranchesFinder.new(@repository, { per_page: limit, sort: SORT_UPDATED_OLDEST })
.execute(gitaly_pagination: true).select(&:stale?)
@branches = @active_branches + @stale_branches
diff --git a/app/finders/todos_finder.rb b/app/finders/todos_finder.rb
index 0bf31ea33dd..2bb233e1906 100644
--- a/app/finders/todos_finder.rb
+++ b/app/finders/todos_finder.rb
@@ -24,7 +24,7 @@ class TodosFinder
NONE = '0'
- TODO_TYPES = Set.new(%w[Issue WorkItem MergeRequest DesignManagement::Design AlertManagement::Alert]).freeze
+ TODO_TYPES = Set.new(%w[Issue WorkItem MergeRequest DesignManagement::Design AlertManagement::Alert Namespace Project]).freeze
attr_accessor :current_user, :params
diff --git a/app/graphql/mutations/issues/bulk_update.rb b/app/graphql/mutations/issues/bulk_update.rb
new file mode 100644
index 00000000000..5d72a9372c9
--- /dev/null
+++ b/app/graphql/mutations/issues/bulk_update.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Mutations
+ module Issues
+ class BulkUpdate < BaseMutation
+ graphql_name 'IssuesBulkUpdate'
+
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ MAX_ISSUES = 100
+
+ description 'Allows updating several properties for a set of issues. ' \
+ 'Does nothing if the `bulk_update_issues_mutation` feature flag is disabled.'
+
+ argument :parent_id, ::Types::GlobalIDType[::IssueParent],
+ required: true,
+ description: 'Global ID of the parent that the bulk update will be scoped to. ' \
+ 'Example `IssueParentID` values are `"gid://gitlab/Project/1"` and `"gid://gitlab/Group/1"`.'
+
+ argument :ids, [::Types::GlobalIDType[::Issue]],
+ required: true,
+ description: 'Global ID array of the issues that will be updated. ' \
+ "IDs that the user can\'t update will be ignored. A max of #{MAX_ISSUES} can be provided."
+
+ argument :assignee_ids, [::Types::GlobalIDType[::User]],
+ required: false,
+ description: 'Global ID array of the users that will be assigned to the given issues. ' \
+ 'Existing assignees will be replaced with the ones on this list.'
+
+ field :updated_issue_count, GraphQL::Types::Int,
+ null: true,
+ description: 'Number of issues that were successfully updated.'
+
+ def ready?(**args)
+ if Feature.disabled?(:bulk_update_issues_mutation)
+ raise Gitlab::Graphql::Errors::ResourceNotAvailable, '`bulk_update_issues_mutation` feature flag is disabled.'
+ end
+
+ if args[:ids].size > MAX_ISSUES
+ raise Gitlab::Graphql::Errors::ArgumentError,
+ format(_('No more than %{max_issues} issues can be updated at the same time'), max_issues: MAX_ISSUES)
+ end
+
+ super
+ end
+
+ def resolve(ids:, parent_id:, **attributes)
+ parent = find_parent!(parent_id)
+
+ result = Issuable::BulkUpdateService.new(
+ parent,
+ current_user,
+ prepared_params(attributes, ids)
+ ).execute('issue')
+
+ if result.success?
+ { updated_issue_count: result.payload[:count], errors: [] }
+ else
+ { errors: result.errors }
+ end
+ end
+
+ private
+
+ def find_parent!(parent_id)
+ parent = GitlabSchema.find_by_gid(parent_id).sync
+ raise_resource_not_available_error! unless current_user.can?("read_#{parent.to_ability_name}", parent)
+
+ parent
+ end
+
+ def prepared_params(attributes, ids)
+ prepared = { issuable_ids: model_ids_from(ids).uniq }
+ prepared[:assignee_ids] = model_ids_from(attributes[:assignee_ids]) if attributes[:assignee_ids]
+
+ prepared
+ end
+
+ def model_ids_from(attributes)
+ attributes.map(&:model_id)
+ end
+ end
+ end
+end
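For reference, a minimal console sketch of how the new mutation could be exercised once the `bulk_update_issues_mutation` flag is on. The query shape follows the generated GraphQL reference further down in this diff; the user lookup and the global IDs are placeholders, not values from this MR:

```ruby
# Hypothetical Rails console session; all IDs below are illustrative.
Feature.enable(:bulk_update_issues_mutation)

user = User.find_by_username('root')

query = <<~GRAPHQL
  mutation {
    issuesBulkUpdate(input: {
      parentId: "gid://gitlab/Group/1"
      ids: ["gid://gitlab/Issue/1", "gid://gitlab/Issue/2"]
      assigneeIds: ["gid://gitlab/User/5"]
    }) {
      updatedIssueCount
      errors
    }
  }
GRAPHQL

result = GitlabSchema.execute(query, context: { current_user: user })
result['data']['issuesBulkUpdate']['updatedIssueCount'] # => 2 if both issues were updatable
```

Issues the user cannot update are skipped rather than failing the whole request, which is why `updatedIssueCount` is reported back instead of a simple success flag.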
diff --git a/app/graphql/resolvers/group_releases_resolver.rb b/app/graphql/resolvers/group_releases_resolver.rb
new file mode 100644
index 00000000000..115289e1fca
--- /dev/null
+++ b/app/graphql/resolvers/group_releases_resolver.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Resolvers
+ class GroupReleasesResolver < BaseResolver
+ type Types::ReleaseType.connection_type, null: true
+
+ argument :sort, Types::GroupReleaseSortEnum,
+ required: false, default_value: :released_at_desc,
+ description: 'Sort group releases by given criteria.'
+
+ alias_method :group, :object
+
+ # GroupReleasesFinder only supports sorting by `released_at`
+ SORT_TO_PARAMS_MAP = {
+ released_at_desc: { sort: 'desc' },
+ released_at_asc: { sort: 'asc' }
+ }.freeze
+
+ def resolve(sort:)
+ releases = Releases::GroupReleasesFinder.new(
+ group,
+ current_user,
+ SORT_TO_PARAMS_MAP[sort]
+ ).execute
+ # Fixes an ordering problem between GroupReleasesFinder and keyset pagination.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/378160 for more details.
+ offset_pagination(releases)
+ end
+ end
+end
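As a rough illustration of the mapping above, the resolver only translates the GraphQL enum into the `sort` parameter the finder understands. A hedged console sketch; the group path and username are placeholders:

```ruby
group = Group.find_by_full_path('gitlab-org')  # placeholder group
user  = User.find_by_username('root')          # placeholder user

# :released_at_asc maps to { sort: 'asc' }, mirroring SORT_TO_PARAMS_MAP above.
releases = Releases::GroupReleasesFinder
  .new(group, user, { sort: 'asc' })
  .execute

# Releases from all projects in the group, oldest release first.
releases.map(&:released_at).first(3)
```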
diff --git a/app/graphql/resolvers/releases_resolver.rb b/app/graphql/resolvers/releases_resolver.rb
index 358f3c33836..06f4ca2065c 100644
--- a/app/graphql/resolvers/releases_resolver.rb
+++ b/app/graphql/resolvers/releases_resolver.rb
@@ -6,7 +6,7 @@ module Resolvers
argument :sort, Types::ReleaseSortEnum,
required: false, default_value: :released_at_desc,
- description: 'Sort releases by this criteria.'
+ description: 'Sort releases by given criteria.'
alias_method :project, :object
diff --git a/app/graphql/types/group_release_sort_enum.rb b/app/graphql/types/group_release_sort_enum.rb
new file mode 100644
index 00000000000..7420e7a31ad
--- /dev/null
+++ b/app/graphql/types/group_release_sort_enum.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Types
+ # Not inheriting from Types::SortEnum since we only want
+ # to implement a subset of the sort values it defines.
+ class GroupReleaseSortEnum < BaseEnum
+ graphql_name 'GroupReleaseSort'
+ description 'Values for sorting releases belonging to a group'
+
+ # Borrowed from Types::ReleaseSortEnum and Types::SortEnum
+ # These values/descriptions should stay in-sync as much as possible.
+ value 'RELEASED_AT_DESC', 'Released at by descending order.', value: :released_at_desc
+ value 'RELEASED_AT_ASC', 'Released at by ascending order.', value: :released_at_asc
+ end
+end
diff --git a/app/graphql/types/group_type.rb b/app/graphql/types/group_type.rb
index dcf56f961ce..ad149d039fa 100644
--- a/app/graphql/types/group_type.rb
+++ b/app/graphql/types/group_type.rb
@@ -233,6 +233,12 @@ module Types
resolver: Resolvers::WorkItems::TypesResolver,
description: 'Work item types available to the group.'
+ field :releases,
+ Types::ReleaseType.connection_type,
+ null: true,
+ description: 'Releases belonging to projects in the group.',
+ resolver: Resolvers::GroupReleasesResolver
+
def label(title:)
BatchLoader::GraphQL.for(title).batch(key: group) do |titles, loader, args|
LabelsFinder
diff --git a/app/graphql/types/mutation_type.rb b/app/graphql/types/mutation_type.rb
index 0d7cda5ec5f..e48e9deae96 100644
--- a/app/graphql/types/mutation_type.rb
+++ b/app/graphql/types/mutation_type.rb
@@ -66,6 +66,7 @@ module Types
mount_mutation Mutations::Issues::Move
mount_mutation Mutations::Issues::LinkAlerts
mount_mutation Mutations::Issues::UnlinkAlert
+ mount_mutation Mutations::Issues::BulkUpdate, alpha: { milestone: '15.9' }
mount_mutation Mutations::Labels::Create
mount_mutation Mutations::Members::Groups::BulkUpdate
mount_mutation Mutations::MergeRequests::Accept
diff --git a/app/helpers/sorting_titles_values_helper.rb b/app/helpers/sorting_titles_values_helper.rb
index b49cb617d80..db779a9d8c1 100644
--- a/app/helpers/sorting_titles_values_helper.rb
+++ b/app/helpers/sorting_titles_values_helper.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
module SortingTitlesValuesHelper
+ SORT_NAME = 'name_asc'
+ SORT_UPDATED_OLDEST = 'updated_asc'
+ SORT_UPDATED_RECENT = 'updated_desc'
+
# Titles.
def sort_title_created_date
s_('SortOptions|Created date')
diff --git a/app/helpers/tab_helper.rb b/app/helpers/tab_helper.rb
index 04619ad3bda..137b24102e0 100644
--- a/app/helpers/tab_helper.rb
+++ b/app/helpers/tab_helper.rb
@@ -9,7 +9,7 @@ module TabHelper
# the <gl-tabs/> component. Can be populated by
# gl_tab_link_to elements.
#
- # See more at: https://gitlab-org.gitlab.io/gitlab-ui/?path=/story/base-tabs-tab--default
+ # See more at: https://gitlab-org.gitlab.io/gitlab-ui/?path=/story/base-tabs--default
def gl_tabs_nav(html_options = {}, &block)
gl_tabs_classes = %w[nav gl-tabs-nav]
diff --git a/app/models/concerns/issue_parent.rb b/app/models/concerns/issue_parent.rb
new file mode 100644
index 00000000000..c1fcbdcfc12
--- /dev/null
+++ b/app/models/concerns/issue_parent.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# == IssueParent
+#
+# Used as a common ancestor for Group and Project so we can allow a polymorphic
+# Types::GlobalIDType[::IssueParent] in the GraphQL API
+#
+# Used by Project, Group
+#
+module IssueParent
+end
diff --git a/app/models/group.rb b/app/models/group.rb
index a2d984973bd..67800306606 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -21,6 +21,7 @@ class Group < Namespace
include ChronicDurationAttribute
include RunnerTokenExpirationInterval
include Todoable
+ include IssueParent
extend ::Gitlab::Utils::Override
diff --git a/app/models/namespaces/traversal/linear.rb b/app/models/namespaces/traversal/linear.rb
index 16a9c20dfdc..0e9760832af 100644
--- a/app/models/namespaces/traversal/linear.rb
+++ b/app/models/namespaces/traversal/linear.rb
@@ -47,6 +47,9 @@ module Namespaces
# This uses rails internal before_commit API to sync traversal_ids on namespace create, right before transaction is committed.
# This helps reduce the time during which the root namespace record is locked to ensure updated traversal_ids are valid
before_commit :sync_traversal_ids, on: [:create]
+ after_commit :set_traversal_ids,
+ if: -> { traversal_ids.empty? || saved_change_to_parent_id? },
+ on: [:create, :update]
define_model_callbacks :sync_traversal_ids
end
@@ -78,6 +81,15 @@ module Namespaces
end
end
+ def traversal_ids=(ids)
+ super(ids)
+ self.transient_traversal_ids = nil
+ end
+
+ def traversal_ids
+ read_attribute(:traversal_ids).presence || transient_traversal_ids || []
+ end
+
def use_traversal_ids?
return false unless Feature.enabled?(:use_traversal_ids)
@@ -174,12 +186,11 @@ module Namespaces
# we need to preserve those specific parameters for super.
hierarchy_order ||= :desc
- # Get all ancestor IDs inclusively between top and our parent.
- top_index = top ? traversal_ids.find_index(top.id) : 0
- ids = traversal_ids[top_index...-1]
- ids_string = ids.map { |id| Integer(id) }.join(',')
+ top_index = ancestors_upto_top_index(top)
+ ids = traversal_ids[top_index...-1].reverse
# WITH ORDINALITY lets us order the result to match traversal_ids order.
+ ids_string = ids.map { |id| Integer(id) }.join(',')
from_sql = <<~SQL
unnest(ARRAY[#{ids_string}]::bigint[]) WITH ORDINALITY AS ancestors(id, ord)
INNER JOIN namespaces ON namespaces.id = ancestors.id
@@ -206,6 +217,8 @@ module Namespaces
private
+ attr_accessor :transient_traversal_ids
+
# Update the traversal_ids for the full hierarchy.
#
# NOTE: self.traversal_ids will be stale. Reload for a fresh record.
@@ -218,6 +231,27 @@ module Namespaces
end
end
+ def set_traversal_ids
+ # This is a temporary guard and will be removed.
+ return if is_a?(Namespaces::ProjectNamespace)
+
+ return unless Feature.enabled?(:set_traversal_ids_on_save, root_ancestor)
+
+ self.transient_traversal_ids = if parent_id
+ parent.traversal_ids + [id]
+ else
+ [id]
+ end
+
+ # Clear root_ancestor memo if changed.
+ if read_attribute(:traversal_ids)&.first != transient_traversal_ids.first
+ clear_memoization(:root_ancestor)
+ end
+
+ # Update traversal_ids for any associated child objects.
+ children.each(&:reload) if children.loaded?
+ end
+
# Lock the root of the hierarchy we just left, and lock the root of the hierarchy
# we just joined. In most cases the two hierarchies will be the same.
def lock_both_roots
@@ -266,6 +300,17 @@ module Namespaces
skope
end
+
+ def ancestors_upto_top_index(top)
+ return 0 if top.nil?
+
+ index = traversal_ids.find_index(top.id)
+ if index.nil?
+ 0
+ else
+ index + 1
+ end
+ end
end
end
end
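A hedged sketch of the intended effect when the `set_traversal_ids_on_save` flag (introduced later in this diff) is enabled: `traversal_ids` becomes readable straight after save through the transient value, without reloading the record. The group names and paths are illustrative only:

```ruby
Feature.enable(:set_traversal_ids_on_save)

parent = Group.create!(name: 'parent-group', path: 'parent-group')
child  = Group.create!(name: 'child-group', path: 'child-group', parent: parent)

# With the after_commit callback, the transient value is already populated:
parent.traversal_ids # => [parent.id]
child.traversal_ids  # => [parent.id, child.id]
```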
diff --git a/app/models/project.rb b/app/models/project.rb
index a4fa20b227e..1941397b846 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -40,6 +40,7 @@ class Project < ApplicationRecord
include RunnerTokenExpirationInterval
include BlocksUnsafeSerialization
include Subquery
+ include IssueParent
extend Gitlab::Cache::RequestCache
extend Gitlab::Utils::Override
diff --git a/app/models/work_item.rb b/app/models/work_item.rb
index be090b09425..ec2071b663f 100644
--- a/app/models/work_item.rb
+++ b/app/models/work_item.rb
@@ -22,6 +22,8 @@ class WorkItem < Issue
scope :inc_relations_for_permission_check, -> { includes(:author, project: :project_feature) }
+ delegate :supports_assignee?, to: :work_item_type
+
class << self
def assignee_association_name
'issue'
@@ -57,7 +59,7 @@ class WorkItem < Issue
end
def supported_quick_action_commands
- commands_for_widgets = widgets.map(&:class).flat_map(&:quick_action_commands).uniq
+ commands_for_widgets = work_item_type.widgets.flat_map(&:quick_action_commands).uniq
COMMON_QUICK_ACTIONS_COMMANDS + commands_for_widgets
end
diff --git a/app/models/work_items/type.rb b/app/models/work_items/type.rb
index e1f6a13f7a7..9b434ef946c 100644
--- a/app/models/work_items/type.rb
+++ b/app/models/work_items/type.rb
@@ -141,6 +141,10 @@ module WorkItems
WIDGETS_FOR_TYPE[base_type.to_sym]
end
+ def supports_assignee?
+ widgets.include? ::WorkItems::Widgets::Assignees
+ end
+
private
def strip_whitespace
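A small sketch of the new predicate, which `WorkItem` now exposes through the `delegate` added above. The `default_by_type` lookup is assumed from existing `WorkItems::Type` helpers, and the work item ID is a placeholder:

```ruby
issue_type = WorkItems::Type.default_by_type(:issue)
issue_type.supports_assignee? # => true when the :issue widget list includes WorkItems::Widgets::Assignees

work_item = WorkItem.find(42) # placeholder ID
work_item.supports_assignee?  # delegated to work_item.work_item_type
```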
diff --git a/app/serializers/build_details_entity.rb b/app/serializers/build_details_entity.rb
index 1caa9720c08..9b21fc57b9e 100644
--- a/app/serializers/build_details_entity.rb
+++ b/app/serializers/build_details_entity.rb
@@ -4,7 +4,7 @@ class BuildDetailsEntity < Ci::JobEntity
expose :coverage, :erased_at, :finished_at, :duration
expose :tag_list, as: :tags
expose :has_trace?, as: :has_trace
- expose :stage
+ expose :stage_name, as: :stage
expose :stuck?, as: :stuck
expose :user, using: UserEntity
expose :runner, using: RunnerEntity
diff --git a/app/services/issuable/bulk_update_service.rb b/app/services/issuable/bulk_update_service.rb
index 30444fa3938..48bfa71ee92 100644
--- a/app/services/issuable/bulk_update_service.rb
+++ b/app/services/issuable/bulk_update_service.rb
@@ -13,9 +13,9 @@ module Issuable
end
def execute(type)
- ids = params.delete(:issuable_ids).split(",")
+ model_ids = ids_from_params(params.delete(:issuable_ids))
set_update_params(type)
- updated_issuables = update_issuables(type, ids)
+ updated_issuables = update_issuables(type, model_ids)
if updated_issuables.present? && requires_count_cache_reset?(type)
schedule_group_issues_count_reset(updated_issuables)
@@ -28,9 +28,15 @@ module Issuable
private
+ def ids_from_params(issuable_ids)
+ return issuable_ids if issuable_ids.is_a?(Array)
+
+ issuable_ids.split(',')
+ end
+
def set_update_params(type)
params.slice!(*permitted_attrs(type))
- params.delete_if { |k, v| v.blank? }
+ params.delete_if { |_, v| !v.is_a?(Array) && v.blank? }
if params[:assignee_ids] == [IssuableFinder::Params::NONE.to_s]
params[:assignee_ids] = []
@@ -53,10 +59,12 @@ module Issuable
model_class = type.classify.constantize
update_class = type.classify.pluralize.constantize::UpdateService
items = find_issuables(parent, model_class, ids)
+ authorized_issuables = []
items.each do |issuable|
next unless can?(current_user, :"update_#{type}", issuable)
+ authorized_issuables << issuable
update_class.new(
**update_class.constructor_container_arg(issuable.issuing_parent),
current_user: current_user,
@@ -64,23 +72,22 @@ module Issuable
).execute(issuable)
end
- items
+ authorized_issuables
end
def find_issuables(parent, model_class, ids)
+ issuables = model_class.id_in(ids)
+
case parent
when Project
- projects = parent
+ issuables = issuables.of_projects(parent)
when Group
- projects = parent.all_projects
+ issuables = issuables.of_projects(parent.all_projects)
else
- return
+ raise ArgumentError, _('A parent must be provided when bulk updating issuables')
end
- model_class
- .id_in(ids)
- .of_projects(projects)
- .includes_for_bulk_update
+ issuables.includes_for_bulk_update
end
# Duplicates params and its top-level values
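With these changes the service accepts `issuable_ids` as an array (in addition to the comma-separated string used by the existing controller path) and returns only the issuables the user was authorized to update. A hedged usage sketch; `project`, `current_user`, and the IDs are placeholders:

```ruby
result = Issuable::BulkUpdateService.new(
  project,                      # or a Group; any other parent now raises ArgumentError
  current_user,
  { issuable_ids: [1, 2, 3], assignee_ids: [current_user.id] }
).execute('issue')

result.success?        # => true
result.payload[:count] # => number of issues the user was actually allowed to update
```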
diff --git a/app/services/members/approve_access_request_service.rb b/app/services/members/approve_access_request_service.rb
index 5e73d7a957b..20f96ac2949 100644
--- a/app/services/members/approve_access_request_service.rb
+++ b/app/services/members/approve_access_request_service.rb
@@ -15,6 +15,12 @@ module Members
private
+ def after_execute(member:, skip_log_audit_event:)
+ super
+
+ resolve_access_request_todos(current_user, member)
+ end
+
def validate_access!(access_requester)
raise Gitlab::Access::AccessDeniedError unless can_approve_access_requester?(access_requester)
diff --git a/app/services/members/base_service.rb b/app/services/members/base_service.rb
index 62b8fc5d6f7..801f77ae082 100644
--- a/app/services/members/base_service.rb
+++ b/app/services/members/base_service.rb
@@ -53,6 +53,10 @@ module Members
end
end
+ def resolve_access_request_todos(current_user, requester)
+ todo_service.resolve_access_request_todos(current_user, requester)
+ end
+
def enqueue_delete_todos(member)
type = member.is_a?(GroupMember) ? 'Group' : 'Project'
# don't enqueue immediately to prevent todos removal in case of a mistake
diff --git a/app/services/members/destroy_service.rb b/app/services/members/destroy_service.rb
index 24c5b12b335..dd84b890385 100644
--- a/app/services/members/destroy_service.rb
+++ b/app/services/members/destroy_service.rb
@@ -63,6 +63,7 @@ module Members
delete_subresources(member) unless skip_subresources
delete_project_invitations_by(member) unless skip_subresources
+ resolve_access_request_todos(current_user, member)
enqueue_delete_todos(member)
enqueue_unassign_issuables(member) if unassign_issuables
diff --git a/app/services/preview_markdown_service.rb b/app/services/preview_markdown_service.rb
index 03844c2dc7e..b3a9beabba5 100644
--- a/app/services/preview_markdown_service.rb
+++ b/app/services/preview_markdown_service.rb
@@ -17,7 +17,7 @@ class PreviewMarkdownService < BaseService
private
def quick_action_types
- %w(Issue MergeRequest Commit)
+ %w(Issue MergeRequest Commit WorkItem)
end
def explain_quick_actions(text)
diff --git a/app/services/quick_actions/target_service.rb b/app/services/quick_actions/target_service.rb
index 9c228c27523..04ae5287302 100644
--- a/app/services/quick_actions/target_service.rb
+++ b/app/services/quick_actions/target_service.rb
@@ -4,6 +4,8 @@ module QuickActions
class TargetService < BaseService
def execute(type, type_iid)
case type&.downcase
+ when 'workitem'
+ work_item(type_iid)
when 'issue'
issue(type_iid)
when 'mergerequest'
@@ -16,6 +18,12 @@ module QuickActions
private
# rubocop: disable CodeReuse/ActiveRecord
+ def work_item(type_iid)
+ WorkItems::WorkItemsFinder.new(current_user, project_id: project.id).find_by(iid: type_iid)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
def issue(type_iid)
return project.issues.build if type_iid.nil?
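Together with the `PreviewMarkdownService` change above, quick actions can now be previewed against work items. A sketch of the lookup, assuming the usual positional `BaseService.new(project, user)` signature and a placeholder IID:

```ruby
target = QuickActions::TargetService
  .new(project, current_user)
  .execute('WorkItem', work_item_iid)

# `type&.downcase` matches 'workitem', so this returns the WorkItem with that IID
# (scoped to the project via WorkItems::WorkItemsFinder), or nil if none is found.
```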
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index bfd1e55507c..42a8aca17d3 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -198,6 +198,23 @@ class TodoService
current_user.update_todos_count_cache
end
+ def resolve_access_request_todos(current_user, member)
+ return if current_user.nil? || member.nil?
+
+ target = member.source
+
+ finder_params = {
+ state: :pending,
+ author_id: member.user_id,
+ action_id: ::Todo::MEMBER_ACCESS_REQUESTED,
+ type: target.class.polymorphic_name,
+ target: target.id
+ }
+
+ todos = TodosFinder.new(current_user, finder_params).execute
+ resolve_todos(todos, current_user)
+ end
+
def restore_todos(todos, current_user)
todos_ids = todos.batch_update(state: :pending)
diff --git a/app/views/admin/broadcast_messages/edit.html.haml b/app/views/admin/broadcast_messages/edit.html.haml
index bef435c07f7..212cc437d3d 100644
--- a/app/views/admin/broadcast_messages/edit.html.haml
+++ b/app/views/admin/broadcast_messages/edit.html.haml
@@ -1,4 +1,5 @@
-- breadcrumb_title _("Messages")
+- add_to_breadcrumbs _("Messages"), admin_broadcast_messages_path
+- breadcrumb_title @broadcast_message.id
- page_title _("Broadcast Messages")
#js-broadcast-message{ data: {
diff --git a/app/views/projects/branches/index.html.haml b/app/views/projects/branches/index.html.haml
index 475bc9e1c20..f43d19e2542 100644
--- a/app/views/projects/branches/index.html.haml
+++ b/app/views/projects/branches/index.html.haml
@@ -2,6 +2,11 @@
- page_title _('Branches')
- add_to_breadcrumbs(_('Repository'), project_tree_path(@project))
-# Possible values for variables passed down from projects/branches_controller.rb
-#
-# @mode - overview|active|stale|all (default: overview)
+-# @sort - name_asc|updated_asc|updated_desc
+
.top-area.gl-border-0
= gl_tabs_nav({ class: 'gl-flex-grow-1 gl-border-b-0' }) do
= gl_tab_link_to s_('Branches|Overview'), project_branches_path(@project), { item_active: @mode == 'overview', title: s_('Branches|Show overview of the branches') }
@@ -10,7 +15,12 @@
= gl_tab_link_to s_('Branches|All'), project_branches_filtered_path(@project, state: 'all'), { item_active: %w[overview active stale].exclude?(@mode), title: s_('Branches|Show all branches') }
.nav-controls
- #js-branches-sort-dropdown{ data: { project_branches_filtered_path: project_branches_path(@project, state: 'all'), sort_options: branches_sort_options_hash.to_json, mode: @mode } }
+ #js-branches-sort-dropdown{ data: {
+ project_branches_filtered_path: project_branches_path(@project, state: 'all'),
+ sort_options: branches_sort_options_hash.to_json,
+ show_dropdown: @mode == 'overview' ? 'false' : 'true',
+ sorted_by: @sort }
+ }
- if can? current_user, :push_code, @project
.js-delete-merged-branches{ data: {
diff --git a/config/feature_flags/development/bulk_update_issues_mutation.yml b/config/feature_flags/development/bulk_update_issues_mutation.yml
new file mode 100644
index 00000000000..72de356f019
--- /dev/null
+++ b/config/feature_flags/development/bulk_update_issues_mutation.yml
@@ -0,0 +1,8 @@
+---
+name: bulk_update_issues_mutation
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108505
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/388472
+milestone: '15.9'
+type: development
+group: group::project management
+default_enabled: false
diff --git a/config/feature_flags/development/set_traversal_ids_on_save.yml b/config/feature_flags/development/set_traversal_ids_on_save.yml
new file mode 100644
index 00000000000..ea07dafd9e4
--- /dev/null
+++ b/config/feature_flags/development/set_traversal_ids_on_save.yml
@@ -0,0 +1,8 @@
+---
+name: set_traversal_ids_on_save
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104328
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/383217
+milestone: '15.8'
+type: development
+group: group::organization
+default_enabled: false
diff --git a/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb b/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb
index 4c332a1db81..07402b749ea 100644
--- a/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb
+++ b/db/post_migrate/20230119095024_validate_fk_on_ci_builds_runner_session_partition_id_and_build_id.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
class ValidateFkOnCiBuildsRunnerSessionPartitionIdAndBuildId < Gitlab::Database::Migration[2.1]
- disable_ddl_transaction!
-
TABLE_NAME = :ci_builds_runner_session
FK_NAME = :fk_rails_70707857d3_p
COLUMNS = [:partition_id, :build_id]
diff --git a/doc/api/draft_notes.md b/doc/api/draft_notes.md
index 423ba2bb31d..f1749346dc2 100644
--- a/doc/api/draft_notes.md
+++ b/doc/api/draft_notes.md
@@ -22,7 +22,7 @@ GET /projects/:id/merge_requests/:merge_request_iid/draft_notes
| `merge_request_iid` | integer | yes | The IID of a project merge request
```json
-{
+[{
id: 5,
author_id: 23,
merge_request_id: 11,
@@ -43,9 +43,10 @@ GET /projects/:id/merge_requests/:merge_request_iid/draft_notes
new_line: nil,
line_range: nil
}
-}
+}]
```
```shell
-curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/merge_requests/11/draft_notes"
+curl --header "PRIVATE-TOKEN: <your_access_token>" \
+ "https://gitlab.example.com/api/v4/projects/14/merge_requests/11/draft_notes"
```
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index e82b5c71a9b..1dddd44d030 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -3563,6 +3563,33 @@ Input type: `IssueUnlinkAlertInput`
| <a id="mutationissueunlinkalerterrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationissueunlinkalertissue"></a>`issue` | [`Issue`](#issue) | Issue after mutation. |
+### `Mutation.issuesBulkUpdate`
+
+Allows updating several properties for a set of issues. Does nothing if the `bulk_update_issues_mutation` feature flag is disabled.
+
+WARNING:
+**Introduced** in 15.9.
+This feature is in Alpha. It can be changed or removed at any time.
+
+Input type: `IssuesBulkUpdateInput`
+
+#### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="mutationissuesbulkupdateassigneeids"></a>`assigneeIds` | [`[UserID!]`](#userid) | Global ID array of the users that will be assigned to the given issues. Existing assignees will be replaced with the ones on this list. |
+| <a id="mutationissuesbulkupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="mutationissuesbulkupdateids"></a>`ids` | [`[IssueID!]!`](#issueid) | Global ID array of the issues that will be updated. IDs that the user can't update will be ignored. A max of 100 can be provided. |
+| <a id="mutationissuesbulkupdateparentid"></a>`parentId` | [`IssueParentID!`](#issueparentid) | Global ID of the parent that the bulk update will be scoped to . Example `IssueParentID` are `"gid://gitlab/Project/1"` and `"gid://gitlab/Group/1"`. |
+
+#### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="mutationissuesbulkupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+| <a id="mutationissuesbulkupdateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
+| <a id="mutationissuesbulkupdateupdatedissuecount"></a>`updatedIssueCount` | [`Int`](#int) | Number of issues that were successfully updated. |
+
### `Mutation.iterationCadenceCreate`
Input type: `IterationCadenceCreateInput`
@@ -14421,6 +14448,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="groupprojectswithissuesenabled"></a>`withIssuesEnabled` | [`Boolean`](#boolean) | Return only projects with issues enabled. |
| <a id="groupprojectswithmergerequestsenabled"></a>`withMergeRequestsEnabled` | [`Boolean`](#boolean) | Return only projects with merge requests enabled. |
+##### `Group.releases`
+
+Releases belonging to projects in the group.
+
+Returns [`ReleaseConnection`](#releaseconnection).
+
+This field returns a [connection](#connections). It accepts the
+four standard [pagination arguments](#connection-pagination-arguments):
+`before: String`, `after: String`, `first: Int`, `last: Int`.
+
+###### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="groupreleasessort"></a>`sort` | [`GroupReleaseSort`](#groupreleasesort) | Sort group releases by given criteria. |
+
##### `Group.runners`
Find runners visible to the current user.
@@ -18504,7 +18547,7 @@ four standard [pagination arguments](#connection-pagination-arguments):
| Name | Type | Description |
| ---- | ---- | ----------- |
-| <a id="projectreleasessort"></a>`sort` | [`ReleaseSort`](#releasesort) | Sort releases by this criteria. |
+| <a id="projectreleasessort"></a>`sort` | [`ReleaseSort`](#releasesort) | Sort releases by given criteria. |
##### `Project.requirement`
@@ -22384,6 +22427,15 @@ User permission on groups.
| <a id="grouppermissioncreate_projects"></a>`CREATE_PROJECTS` | Groups where the user can create projects. |
| <a id="grouppermissiontransfer_projects"></a>`TRANSFER_PROJECTS` | Groups where the user can transfer projects to. |
+### `GroupReleaseSort`
+
+Values for sorting releases belonging to a group.
+
+| Value | Description |
+| ----- | ----------- |
+| <a id="groupreleasesortreleased_at_asc"></a>`RELEASED_AT_ASC` | Released at by ascending order. |
+| <a id="groupreleasesortreleased_at_desc"></a>`RELEASED_AT_DESC` | Released at by descending order. |
+
### `HealthStatus`
Health status of an issue or epic.
@@ -24057,6 +24109,12 @@ A `IssueID` is a global ID. It is encoded as a string.
An example `IssueID` is: `"gid://gitlab/Issue/1"`.
+### `IssueParentID`
+
+An `IssueParentID` is a global ID. It is encoded as a string.
+
+An example `IssueParentID` is: `"gid://gitlab/IssueParent/1"`.
+
### `IterationID`
A `IterationID` is a global ID. It is encoded as a string.
diff --git a/doc/ci/environments/deployment_approvals.md b/doc/ci/environments/deployment_approvals.md
index a95c47ca4b0..50c49079ce7 100644
--- a/doc/ci/environments/deployment_approvals.md
+++ b/doc/ci/environments/deployment_approvals.md
@@ -139,6 +139,9 @@ Using either the GitLab UI or the API, you can:
- Approve a deployment to allow it to proceed.
- Reject a deployment to prevent it.
+NOTE:
+GitLab administrators can approve or reject all deployments.
+
### Approve or reject a deployment using the UI
Prerequisites:
diff --git a/doc/ci/environments/index.md b/doc/ci/environments/index.md
index 522b8d0af79..657e79267ff 100644
--- a/doc/ci/environments/index.md
+++ b/doc/ci/environments/index.md
@@ -699,14 +699,16 @@ You can manually override a deployment's expiration date.
The `auto_stop_in` setting is overwritten and the environment remains active until it's stopped manually.
-#### Delete a stopped environment
+### Delete an environment
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/20620) in GitLab 12.10.
+Delete an environment when you want to remove it and all its deployments.
-You can delete [stopped environments](#stop-an-environment) in the GitLab UI or by using
-[the API](../../api/environments.md#delete-an-environment).
+Prerequisites:
+
+- You must have at least the Developer role.
+- You must [stop](#stop-an-environment) the environment before it can be deleted.
-To delete a stopped environment in the GitLab UI:
+To delete an environment:
1. On the top bar, select **Main menu > Projects** and find your project.
1. On the left sidebar, select **Deployments > Environments**.
@@ -714,18 +716,6 @@ To delete a stopped environment in the GitLab UI:
1. Next to the environment you want to delete, select **Delete environment**.
1. On the confirmation dialog box, select **Delete environment**.
-#### Delete an active environment without running a stop job
-
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/225794) in GitLab 15.1.
-
-You can delete an active environment without running a stop job.
-This is useful when you have an active environment, but the corresponding `action: stop` job can't run or succeed for some reason.
-
-To delete an active environment:
-
-1. Execute the [Stop an environment API](../../api/environments.md#stop-an-environment) while specifying `force=true`.
-1. Execute the [Delete an environment API](../../api/environments.md#delete-an-environment).
-
### Access an environment for preparation or verification purposes
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/208655) in GitLab 13.2.
diff --git a/doc/ci/environments/protected_environments.md b/doc/ci/environments/protected_environments.md
index 8e8cee302bc..fc49be798ec 100644
--- a/doc/ci/environments/protected_environments.md
+++ b/doc/ci/environments/protected_environments.md
@@ -125,7 +125,7 @@ If the user also has push or merge access to the branch deployed on production,
they have the following privileges:
- [Stop an environment](index.md#stop-an-environment).
-- [Delete a stopped environment](index.md#delete-a-stopped-environment).
+- [Delete an environment](index.md#delete-an-environment).
- [Create an environment terminal](index.md#web-terminals-deprecated).
## Deployment-only access to protected environments
diff --git a/doc/development/architecture.md b/doc/development/architecture.md
index 8a0b5d5607b..2bb5ab6bca6 100644
--- a/doc/development/architecture.md
+++ b/doc/development/architecture.md
@@ -34,7 +34,7 @@ Kubernetes platform. The largest known GitLab instance is on GitLab.com, which i
[official GitLab Helm chart](https://docs.gitlab.com/charts/) and the [official Linux package](https://about.gitlab.com/install/).
A typical installation uses NGINX or Apache as a web server to proxy through
-[GitLab Workhorse](https://gitlab.com/gitlab-org/gitlab-workhorse) and into the [Puma](https://puma.io)
+[GitLab Workhorse](https://gitlab.com/gitlab-org/gitlab/tree/master/workhorse) and into the [Puma](https://puma.io)
application server. GitLab serves web pages and the [GitLab API](../api/rest/index.md) using the Puma
application server. It uses Sidekiq as a job queue which, in turn, uses Redis as a non-persistent
database backend for job information, metadata, and incoming jobs.
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index 398f0c17bac..ee85c34b54d 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -290,6 +290,23 @@ warrant a comment could be:
If there are any projects, snippets, or other assets that are required for a reviewer to validate the solution, ensure they have access to those assets before requesting review.
+When assigning reviewers, it can be helpful to:
+
+- Add a comment to the MR indicating which *type* of review you are looking for
+ from that reviewer.
+ - For example, if an MR changes a database query and updates
+ backend code, the MR author first needs a `~backend` review and a `~database`
+ review. While assigning the reviewers, the author adds a comment to the MR
+ letting each reviewer know which domain they should review.
+ - Many GitLab team members are domain experts in more than one area,
+ so without this type of comment it is sometimes ambiguous what type
+ of review they are being asked to provide.
+ - Explicitness around MR review types is efficient for the MR author because
+ they receive the type of review that they are looking for and it is
+ efficient for the MR reviewers because they immediately know which type of review to provide.
+ - [Example 1](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75921#note_758161716)
+ - [Example 2](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/109500#note_1253955051)
+
Avoid:
- Adding TODO comments (referenced above) directly to the source code unless the reviewer requires
diff --git a/doc/development/feature_categorization/index.md b/doc/development/feature_categorization/index.md
index 26c2c553d59..dad94a2aae2 100644
--- a/doc/development/feature_categorization/index.md
+++ b/doc/development/feature_categorization/index.md
@@ -221,3 +221,8 @@ Additionally, we flag the offenses via `RSpec/MissingFeatureCategory` RuboCop ru
For Engineering Productivity internal tooling we use `feature_category: :tooling`.
For example in [`spec/tooling/danger/specs_spec.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/tooling/danger/specs_spec.rb#L12).
+
+### Shared feature category
+
+For features that support developers and are not specific to a product group, we use `feature_category: :shared`.
+For example, [`spec/lib/gitlab/job_waiter_spec.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/lib/gitlab/job_waiter_spec.rb).
diff --git a/doc/development/merge_request_concepts/performance.md b/doc/development/merge_request_concepts/performance.md
index c1bdd45891d..740b8f1607b 100644
--- a/doc/development/merge_request_concepts/performance.md
+++ b/doc/development/merge_request_concepts/performance.md
@@ -15,7 +15,7 @@ with and agreed upon by backend maintainers and performance specialists.
It's also highly recommended that you read the following guides:
-- [Performance Guidelines../performance.md)
+- [Performance Guidelines](../performance.md)
- [Avoiding downtime in migrations](../database/avoiding_downtime_in_migrations.md)
## Definition
@@ -59,8 +59,8 @@ section below for more information.
about the impact.
Sometimes it's hard to assess the impact of a merge request. In this case you
-should ask one of the merge request reviewers to review your changes. You can
-find a list of these reviewers at <https://about.gitlab.com/company/team/>. A reviewer
+should ask one of the merge request reviewers to review your changes.
+([A list of reviewers](https://about.gitlab.com/company/team/) is available.) A reviewer
in turn can request a performance specialist to review the changes.
## Think outside of the box
@@ -119,7 +119,7 @@ data migration. Migrating millions of rows is always troublesome and
can have a negative impact on the application.
To better understand how to get help with the query plan reviews
-read this section on [how to prepare the merge request for a database review../database_review.md#how-to-prepare-the-merge-request-for-a-database-review).
+read this section on [how to prepare the merge request for a database review](../database_review.md#how-to-prepare-the-merge-request-for-a-database-review).
## Query Counts
@@ -193,7 +193,12 @@ costly, time-consuming query to the replicas.
## Use CTEs wisely
-Read about [complex queries on the relation object../database/iterating_tables_in_batches.md#complex-queries-on-the-relation-object) for considerations on how to use CTEs. We have found in some situations that CTEs can become problematic in use (similar to the n+1 problem above). In particular, hierarchical recursive CTE queries such as the CTE in [AuthorizedProjectsWorker](https://gitlab.com/gitlab-org/gitlab/-/issues/325688) are very difficult to optimize and don't scale. We should avoid them when implementing new features that require any kind of hierarchical structure.
+Read about [complex queries on the relation object](../database/iterating_tables_in_batches.md#complex-queries-on-the-relation-object)
+for considerations on how to use CTEs. We have found in some situations that CTEs can become
+problematic in use (similar to the n+1 problem above). In particular, hierarchical recursive
+CTE queries such as the CTE in [AuthorizedProjectsWorker](https://gitlab.com/gitlab-org/gitlab/-/issues/325688)
+are very difficult to optimize and don't scale. We should avoid them when implementing new features
+that require any kind of hierarchical structure.
CTEs have been effectively used as an optimization fence in many simpler cases,
such as this [example](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/43242#note_61416277).
@@ -224,7 +229,7 @@ The total number of the queries (including cached ones) executed by the code mod
should not increase unless absolutely necessary.
The number of executed queries (including cached queries) should not depend on
collection size.
-You can write a test by passing the `skip_cached` variable to [QueryRecorder../database/query_recorder.md) to detect this and prevent regressions.
+You can write a test by passing the `skip_cached` variable to [QueryRecorder](../database/query_recorder.md) to detect this and prevent regressions.
As an example, say you have a CI pipeline. All pipeline builds belong to the same pipeline,
thus they also belong to the same project (`pipeline.project`):
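Picking up the QueryRecorder note from the hunk above, here is a rough sketch of the kind of regression test it describes. It assumes the `ActiveRecord::QueryRecorder` helper and the `exceed_all_query_limit` matcher from the linked QueryRecorder guide; `visit_pipeline_page` is an illustrative helper, not part of this change.

```ruby
it 'does not execute additional queries as the collection grows' do
  # Record a baseline; skip_cached: false makes cached queries count as well.
  control = ActiveRecord::QueryRecorder.new(skip_cached: false) { visit_pipeline_page(pipeline) }

  create_list(:ci_build, 5, pipeline: pipeline)

  # The query count must stay flat regardless of how many builds the pipeline has.
  expect { visit_pipeline_page(pipeline) }.not_to exceed_all_query_limit(control)
end
```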
@@ -312,7 +317,7 @@ This could result in Puma timeout and should be avoided at all cost.
You should set a reasonable timeout, gracefully handle exceptions and surface the
errors in UI or logging internally.
-Using [`ReactiveCaching`../utilities.md#reactivecaching) is one of the best solutions to fetch external data.
+Using [`ReactiveCaching`](../utilities.md#reactivecaching) is one of the best solutions to fetch external data.
## Keep database transaction minimal
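Stepping back to the `ReactiveCaching` recommendation a few lines above, here is a minimal sketch of the pattern as described in the linked utilities guide; the model name, `external_url` attribute, and cache settings are illustrative only.

```ruby
class ExternalDashboard < ApplicationRecord
  include ReactiveCaching

  self.reactive_cache_refresh_interval = 1.minute
  self.reactive_cache_lifetime = 10.minutes

  # Runs in a background worker, so a slow or failing external call never
  # blocks the request that asked for the data.
  def calculate_reactive_cache
    Gitlab::HTTP.get(external_url).parsed_response
  end

  def cached_payload
    # Returns nil (and schedules a refresh) until the cache has been populated,
    # so callers must handle the pending case.
    with_reactive_cache { |data| data }
  end
end
```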
@@ -424,7 +429,7 @@ Take into consideration the following when choosing a pagination strategy:
The database has to sort and iterate all previous items, and this operation usually
can result in substantial load put on database.
-You can find useful tips related to pagination in the [pagination guidelines../database/pagination_guidelines.md).
+You can find useful tips related to pagination in the [pagination guidelines](../database/pagination_guidelines.md).
## Badge counters
@@ -561,5 +566,5 @@ can time out, which is especially problematic for slow clients. If clients take
to upload/download the processing slot might be killed due to request processing
timeout (usually between 30s-60s).
-For the above reasons it is required that [Workhorse direct upload../uploads/index.md#direct-upload) is implemented
+For the above reasons it is required that [Workhorse direct upload](../uploads/index.md#direct-upload) is implemented
for all file uploads and downloads.
diff --git a/doc/user/project/merge_requests/conflicts.md b/doc/user/project/merge_requests/conflicts.md
index 6063d64a721..932eec5e631 100644
--- a/doc/user/project/merge_requests/conflicts.md
+++ b/doc/user/project/merge_requests/conflicts.md
@@ -59,7 +59,8 @@ in the user interface, and you can also resolve conflicts locally through the co
To resolve less-complex conflicts from the GitLab user interface:
-1. Go to your merge request.
+1. On the top bar, select **Main menu > Projects** and find your project.
+1. On the left sidebar, select **Merge requests** and find the merge request.
1. Select **Overview**, and scroll to the merge request reports section.
1. Find the merge conflicts message, and select **Resolve conflicts**.
GitLab shows a list of files with merge conflicts. The conflicts are
@@ -83,7 +84,8 @@ Some merge conflicts are more complex, requiring you to manually modify lines to
resolve their conflicts. Use the merge conflict resolution editor to resolve complex
conflicts in the GitLab interface:
-1. Go to your merge request.
+1. On the top bar, select **Main menu > Projects** and find your project.
+1. On the left sidebar, select **Merge requests** and find the merge request.
1. Select **Overview**, and scroll to the merge request reports section.
1. Find the merge conflicts message, and select **Resolve conflicts**.
GitLab shows a list of files with merge conflicts.
diff --git a/doc/user/todos.md b/doc/user/todos.md
index cb7a7248bb2..50b60dc5b4b 100644
--- a/doc/user/todos.md
+++ b/doc/user/todos.md
@@ -130,6 +130,7 @@ To-do items are marked as done if you:
- Create a comment.
- Edit the description.
- Resolve a [design discussion thread](project/issues/design_management.md#resolve-a-discussion-thread-on-a-design).
+- Accept or deny a project or group membership request.
To-do items are **not** marked as done if you:
diff --git a/lib/api/helpers/projects_helpers.rb b/lib/api/helpers/projects_helpers.rb
index c5636fa06de..820ad2e9b33 100644
--- a/lib/api/helpers/projects_helpers.rb
+++ b/lib/api/helpers/projects_helpers.rb
@@ -32,7 +32,7 @@ module API
optional :builds_access_level, type: String, values: %w(disabled private enabled), desc: 'Builds access level. One of `disabled`, `private` or `enabled`'
optional :snippets_access_level, type: String, values: %w(disabled private enabled), desc: 'Snippets access level. One of `disabled`, `private` or `enabled`'
optional :pages_access_level, type: String, values: %w(disabled private enabled public), desc: 'Pages access level. One of `disabled`, `private`, `enabled` or `public`'
- optional :operations_access_level, type: String, values: %w(disabled private enabled), desc: 'Operations access level. One of `disabled`, `private` or `enabled`'
+ optional :operations_access_level, type: String, values: %w(disabled private enabled), desc: 'Operations access level. One of `disabled`, `private` or `enabled`. Deprecated in GitLab 15.8, see https://gitlab.com/gitlab-org/gitlab/-/issues/385798.'
optional :analytics_access_level, type: String, values: %w(disabled private enabled), desc: 'Analytics access level. One of `disabled`, `private` or `enabled`'
optional :container_registry_access_level, type: String, values: %w(disabled private enabled), desc: 'Controls visibility of the container registry. One of `disabled`, `private` or `enabled`. `private` will make the container registry accessible only to project members (reporter role and above). `enabled` will make the container registry accessible to everyone who has access to the project. `disabled` will disable the container registry'
optional :security_and_compliance_access_level, type: String, values: %w(disabled private enabled), desc: 'Security and compliance access level. One of `disabled`, `private` or `enabled`'
diff --git a/lib/gitlab/quick_actions/command_definition.rb b/lib/gitlab/quick_actions/command_definition.rb
index d9135d1bacb..118c8aad460 100644
--- a/lib/gitlab/quick_actions/command_definition.rb
+++ b/lib/gitlab/quick_actions/command_definition.rb
@@ -149,7 +149,13 @@ module Gitlab
end
def valid_type?(context)
- types.blank? || types.any? { |type| context.quick_action_target.is_a?(type) }
+ types.blank? || types.any? do |type|
+ if context.quick_action_target.is_a?(WorkItem)
+ context.quick_action_target.supported_quick_action_commands.include?(name.to_sym)
+ else
+ context.quick_action_target.is_a?(type)
+ end
+ end
end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index a3ef32f3b7e..e904cd6c55f 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -1735,6 +1735,9 @@ msgstr ""
msgid "A page with that title already exists"
msgstr ""
+msgid "A parent must be provided when bulk updating issuables"
+msgstr ""
+
msgid "A personal access token has been revoked"
msgstr ""
@@ -28075,6 +28078,9 @@ msgstr ""
msgid "No milestone"
msgstr ""
+msgid "No more than %{max_issues} issues can be updated at the same time"
+msgstr ""
+
msgid "No other labels with such name or description"
msgstr ""
diff --git a/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction.rb b/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction.rb
new file mode 100644
index 00000000000..547dfb60945
--- /dev/null
+++ b/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require_relative '../../migration_helpers'
+
+module RuboCop
+ module Cop
+ module Migration
+ # Cop that prevents usage of `disable_ddl_transaction!`
+ # if the only statement being called in the migration is :validate_foreign_key.
+ #
+ # We do this because PostgreSQL will add an implicit transaction for single
+      # statements, so there's no reason to add `disable_ddl_transaction!`.
+ #
+ # This cop was introduced to clarify the need for disable_ddl_transaction!
+ # and to avoid bike-shedding and review back-and-forth.
+ #
+      # @example
+ #
+ # # bad
+ # class SomeMigration < Gitlab::Database::Migration[2.1]
+ # disable_ddl_transaction!
+ # def up
+ # validate_foreign_key :emails, :user_id
+ # end
+ # def down
+ # # no-op
+ # end
+ # end
+ #
+ # # good
+ # class SomeMigration < Gitlab::Database::Migration[2.1]
+ # def up
+ # validate_foreign_key :emails, :user_id
+ # end
+ # def down
+ # # no-op
+ # end
+ # end
+ class PreventSingleStatementWithDisableDdlTransaction < RuboCop::Cop::Base
+ include MigrationHelpers
+
+ MSG = "PostgreSQL will add an implicit transaction for single statements. " \
+ "So there's no reason to use `disable_ddl_transaction!`, if you're only " \
+ "executing validate_foreign_key."
+
+ def_node_matcher :disable_ddl_transaction?, <<~PATTERN
+ (send _ :disable_ddl_transaction! ...)
+ PATTERN
+
+ def_node_matcher :validate_foreign_key?, <<~PATTERN
+ (send :validate_foreign_key ...)
+ PATTERN
+
+ def on_begin(node)
+ return unless in_migration?(node)
+
+ disable_ddl_transaction_node = nil
+
+ # Only perform cop if disable_ddl_transaction! is present
+ node.each_descendant(:send) do |send_node|
+ disable_ddl_transaction_node = send_node if disable_ddl_transaction?(send_node)
+ end
+
+ return unless disable_ddl_transaction_node
+
+ # For each migration method, check if :validate_foreign_key is the only statement.
+ node.each_descendant(:def) do |def_node|
+ break unless [:up, :down, :change].include? def_node.children[0]
+
+ statement_count = 0
+ has_validate_foreign_key = false
+
+ def_node.each_descendant(:send) do |send_node|
+ has_validate_foreign_key = true if send_node.children[1] == :validate_foreign_key
+ statement_count += 1
+ end
+
+ if disable_ddl_transaction_node && has_validate_foreign_key && statement_count == 1
+ add_offense(disable_ddl_transaction_node, message: MSG)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/rubocop/cop/rspec/invalid_feature_category.rb b/rubocop/cop/rspec/invalid_feature_category.rb
index 4d658fdc36f..be6d802bc92 100644
--- a/rubocop/cop/rspec/invalid_feature_category.rb
+++ b/rubocop/cop/rspec/invalid_feature_category.rb
@@ -36,7 +36,9 @@ module RuboCop
# List of feature categories which are not defined in config/feature_categories.yml
CUSTOM_FEATURE_CATEGORIES = [
# https://docs.gitlab.com/ee/development/feature_categorization/#tooling-feature-category
- :tooling
+ :tooling,
+ # https://docs.gitlab.com/ee/development/feature_categorization/#shared-feature-category
+ :shared
].to_set.freeze
# @!method feature_category?(node)
diff --git a/spec/controllers/projects/autocomplete_sources_controller_spec.rb b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
index 1797f7b794b..70178083e71 100644
--- a/spec/controllers/projects/autocomplete_sources_controller_spec.rb
+++ b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
@@ -9,7 +9,9 @@ RSpec.describe Projects::AutocompleteSourcesController do
let_it_be(:public_project) { create(:project, :public, group: group) }
let_it_be(:development) { create(:label, project: project, name: 'Development') }
let_it_be(:private_issue) { create(:labeled_issue, project: project, labels: [development]) }
+ let_it_be(:private_work_item) { create(:work_item, project: project) }
let_it_be(:issue) { create(:labeled_issue, project: public_project, labels: [development]) }
+ let_it_be(:work_item) { create(:work_item, project: public_project, id: 1, iid: 100) }
let_it_be(:user) { create(:user) }
def members_by_username(username)
@@ -54,6 +56,13 @@ RSpec.describe Projects::AutocompleteSourcesController do
it_behaves_like 'issuable commands'
end
+ context 'with work items' do
+ let(:issuable_type) { work_item.class.name }
+ let(:issuable_iid) { work_item.iid }
+
+ it_behaves_like 'issuable commands'
+ end
+
context 'with merge request' do
let(:merge_request) { create(:merge_request, target_project: public_project, source_project: public_project) }
let(:issuable_type) { merge_request.class.name }
@@ -91,11 +100,19 @@ RSpec.describe Projects::AutocompleteSourcesController do
it_behaves_like 'label commands'
end
+
+ context 'with work items' do
+ let(:issuable_type) { work_item.class.name }
+ let(:issuable_iid) { work_item.iid }
+
+ it_behaves_like 'label commands'
+ end
end
describe 'GET members' do
let_it_be(:invited_private_member) { create(:user) }
let_it_be(:issue) { create(:labeled_issue, project: public_project, labels: [development], author: user) }
+ let_it_be(:work_item) { create(:work_item, project: public_project, author: user) }
before_all do
create(:project_group_link, group: private_group, project: public_project)
@@ -145,6 +162,12 @@ RSpec.describe Projects::AutocompleteSourcesController do
it_behaves_like 'all members are returned'
end
+
+ context 'with work item' do
+ let(:issuable_type) { work_item.class.name }
+
+ it_behaves_like 'all members are returned'
+ end
end
context 'when anonymous' do
@@ -181,6 +204,16 @@ RSpec.describe Projects::AutocompleteSourcesController do
let(:issuable_type) { issue.class.name }
end
end
+
+ context 'with work item' do
+ it_behaves_like 'private project is inaccessible' do
+ let(:issuable_type) { private_work_item.class.name }
+ end
+
+ it_behaves_like 'only public members are returned for public project' do
+ let(:issuable_type) { work_item.class.name }
+ end
+ end
end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index d5739386a30..343c7f53022 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -1233,12 +1233,27 @@ RSpec.describe 'Pipeline', :js, feature_category: :projects do
it 'displays the pipeline graph' do
subject
- expect(page).to have_current_path(pipeline_path(pipeline), ignore_query: true)
+ expect(page).to have_current_path(pipeline_path(pipeline))
expect(page).to have_selector('.js-pipeline-graph')
end
end
end
+ describe 'GET /:project/-/pipelines/latest' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let!(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
+
+ before do
+ visit latest_project_pipelines_path(project)
+ end
+
+ it 'displays the pipeline graph with correct URL' do
+ expect(page).to have_current_path("#{pipeline_path(pipeline)}/")
+ expect(page).to have_selector('.js-pipeline-graph')
+ end
+ end
+
describe 'GET /:project/-/pipelines/:id/dag' do
include_context 'pipeline builds'
diff --git a/spec/finders/todos_finder_spec.rb b/spec/finders/todos_finder_spec.rb
index bcead6b0170..c0203dd6132 100644
--- a/spec/finders/todos_finder_spec.rb
+++ b/spec/finders/todos_finder_spec.rb
@@ -327,9 +327,9 @@ RSpec.describe TodosFinder do
it 'returns the expected types' do
expected_result =
if Gitlab.ee?
- %w[Epic Issue WorkItem MergeRequest DesignManagement::Design AlertManagement::Alert]
+ %w[Epic Issue WorkItem MergeRequest DesignManagement::Design AlertManagement::Alert Namespace Project]
else
- %w[Issue WorkItem MergeRequest DesignManagement::Design AlertManagement::Alert]
+ %w[Issue WorkItem MergeRequest DesignManagement::Design AlertManagement::Alert Namespace Project]
end
expect(described_class.todo_types).to contain_exactly(*expected_result)
diff --git a/spec/frontend/add_context_commits_modal/store/actions_spec.js b/spec/frontend/add_context_commits_modal/store/actions_spec.js
index 77666e922ff..27c8d760a96 100644
--- a/spec/frontend/add_context_commits_modal/store/actions_spec.js
+++ b/spec/frontend/add_context_commits_modal/store/actions_spec.js
@@ -16,7 +16,7 @@ import {
} from '~/add_context_commits_modal/store/actions';
import * as types from '~/add_context_commits_modal/store/mutation_types';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
+import { HTTP_STATUS_NO_CONTENT, HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('AddContextCommitsModalStoreActions', () => {
const contextCommitEndpoint =
@@ -100,7 +100,7 @@ describe('AddContextCommitsModalStoreActions', () => {
describe('createContextCommits', () => {
it('calls API to create context commits', async () => {
- mock.onPost(contextCommitEndpoint).reply(200, {});
+ mock.onPost(contextCommitEndpoint).reply(HTTP_STATUS_OK, {});
await testAction(createContextCommits, { commits: [] }, {}, [], []);
@@ -117,7 +117,7 @@ describe('AddContextCommitsModalStoreActions', () => {
.onGet(
`/api/${gon.api_version}/projects/gitlab-org%2Fgitlab/merge_requests/1/context_commits`,
)
- .reply(200, [dummyCommit]);
+ .reply(HTTP_STATUS_OK, [dummyCommit]);
});
it('commits FETCH_CONTEXT_COMMITS', () => {
const contextCommit = { ...dummyCommit, isSelected: true };
diff --git a/spec/frontend/api/alert_management_alerts_api_spec.js b/spec/frontend/api/alert_management_alerts_api_spec.js
index c8da867e573..507f659a170 100644
--- a/spec/frontend/api/alert_management_alerts_api_spec.js
+++ b/spec/frontend/api/alert_management_alerts_api_spec.js
@@ -1,7 +1,11 @@
import MockAdapter from 'axios-mock-adapter';
import * as alertManagementAlertsApi from '~/api/alert_management_alerts_api';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_CREATED, HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_CREATED,
+ HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_OK,
+} from '~/lib/utils/http_status';
describe('~/api/alert_management_alerts_api.js', () => {
let mock;
@@ -34,7 +38,7 @@ describe('~/api/alert_management_alerts_api.js', () => {
const expectedData = [imageData];
const options = { alertIid, id: projectId };
- mock.onGet(expectedUrl).reply(200, { data: expectedData });
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, { data: expectedData });
return alertManagementAlertsApi.fetchAlertMetricImages(options).then(({ data }) => {
expect(axios.get).toHaveBeenCalledWith(expectedUrl);
@@ -97,7 +101,7 @@ describe('~/api/alert_management_alerts_api.js', () => {
expectedFormData.append('url', url);
expectedFormData.append('url_text', urlText);
- mock.onPut(expectedUrl).reply(200, { data: expectedData });
+ mock.onPut(expectedUrl).reply(HTTP_STATUS_OK, { data: expectedData });
return alertManagementAlertsApi
.updateAlertMetricImage({
diff --git a/spec/frontend/branches/components/sort_dropdown_spec.js b/spec/frontend/branches/components/sort_dropdown_spec.js
index 16ed02bfa88..bd41b0daaaa 100644
--- a/spec/frontend/branches/components/sort_dropdown_spec.js
+++ b/spec/frontend/branches/components/sort_dropdown_spec.js
@@ -18,6 +18,8 @@ describe('Branches Sort Dropdown', () => {
updated_asc: 'Oldest updated',
updated_desc: 'Last updated',
},
+ showDropdown: false,
+ sortedBy: 'updated_desc',
...props,
},
}),
@@ -54,7 +56,7 @@ describe('Branches Sort Dropdown', () => {
describe('when in All branches mode', () => {
beforeEach(() => {
- wrapper = createWrapper({ mode: 'all' });
+ wrapper = createWrapper({ mode: 'all', showDropdown: true });
});
it('should have a search box with a placeholder', () => {
@@ -64,7 +66,7 @@ describe('Branches Sort Dropdown', () => {
expect(searchBox.find('input').attributes('placeholder')).toBe('Filter by branch name');
});
- it('should have a branches dropdown when in all branches mode', () => {
+ it('should have a branches dropdown', () => {
const branchesDropdown = findBranchesDropdown();
expect(branchesDropdown.exists()).toBe(true);
@@ -84,7 +86,7 @@ describe('Branches Sort Dropdown', () => {
searchBox.vm.$emit('submit');
expect(urlUtils.visitUrl).toHaveBeenCalledWith(
- '/root/ci-cd-project-demo/-/branches?state=all',
+ '/root/ci-cd-project-demo/-/branches?state=all&sort=updated_desc',
);
});
});
diff --git a/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js b/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
index a7a639e8728..a4eecabcf28 100644
--- a/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
+++ b/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
@@ -34,7 +34,7 @@ describe('registerCaptchaModalInterceptor', () => {
waitForCaptchaToBeSolved.mockRejectedValue(new UnsolvedCaptchaError());
mock = new MockAdapter(axios);
- mock.onAny('/endpoint-without-captcha').reply(200, AXIOS_RESPONSE);
+ mock.onAny('/endpoint-without-captcha').reply(HTTP_STATUS_OK, AXIOS_RESPONSE);
mock.onAny('/endpoint-with-unrelated-error').reply(HTTP_STATUS_NOT_FOUND, AXIOS_RESPONSE);
mock.onAny('/endpoint-with-captcha').reply((config) => {
if (!supportedMethods.includes(config.method)) {
diff --git a/spec/frontend/ci/reports/codequality_report/store/actions_spec.js b/spec/frontend/ci/reports/codequality_report/store/actions_spec.js
index 50a91f070f1..a606bce3d78 100644
--- a/spec/frontend/ci/reports/codequality_report/store/actions_spec.js
+++ b/spec/frontend/ci/reports/codequality_report/store/actions_spec.js
@@ -60,7 +60,7 @@ describe('Codequality Reports actions', () => {
describe('on success', () => {
it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', () => {
- mock.onGet(endpoint).reply(200, reportIssues);
+ mock.onGet(endpoint).reply(HTTP_STATUS_OK, reportIssues);
return testAction(
actions.fetchReports,
@@ -94,7 +94,7 @@ describe('Codequality Reports actions', () => {
describe('when base report is not found', () => {
it('commits REQUEST_REPORTS and dispatches receiveReportsError', () => {
const data = { status: STATUS_NOT_FOUND };
- mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, data);
+ mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(HTTP_STATUS_OK, data);
return testAction(
actions.fetchReports,
diff --git a/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
index a665ec244a2..b7d9d3ad23e 100644
--- a/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
+++ b/spec/frontend/ci/runner/components/search_tokens/tag_token_spec.js
@@ -5,7 +5,7 @@ import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import TagToken, { TAG_SUGGESTIONS_PATH } from '~/ci/runner/components/search_tokens/tag_token.vue';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { getRecentlyUsedSuggestions } from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
@@ -80,10 +80,10 @@ describe('TagToken', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } }).reply(200, mockTags);
+ mock.onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } }).reply(HTTP_STATUS_OK, mockTags);
mock
.onGet(TAG_SUGGESTIONS_PATH, { params: { search: mockSearchTerm } })
- .reply(200, mockTagsFiltered);
+ .reply(HTTP_STATUS_OK, mockTagsFiltered);
getRecentlyUsedSuggestions.mockReturnValue([]);
});
diff --git a/spec/frontend/confidential_merge_request/components/project_form_group_spec.js b/spec/frontend/confidential_merge_request/components/project_form_group_spec.js
index 0e73d50fdb5..d6f16f1a644 100644
--- a/spec/frontend/confidential_merge_request/components/project_form_group_spec.js
+++ b/spec/frontend/confidential_merge_request/components/project_form_group_spec.js
@@ -3,6 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import ProjectFormGroup from '~/confidential_merge_request/components/project_form_group.vue';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
const mockData = [
{
@@ -27,7 +28,7 @@ let mock;
function factory(projects = mockData) {
mock = new MockAdapter(axios);
- mock.onGet(/api\/(.*)\/projects\/gitlab-org%2Fgitlab-ce\/forks/).reply(200, projects);
+ mock.onGet(/api\/(.*)\/projects\/gitlab-org%2Fgitlab-ce\/forks/).reply(HTTP_STATUS_OK, projects);
wrapper = shallowMount(ProjectFormGroup, {
propsData: {
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index c14bbc2f9f9..03b1e977548 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -4,6 +4,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import ContributorsCharts from '~/contributors/components/contributors.vue';
import { createStore } from '~/contributors/stores';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import RefSelector from '~/ref/components/ref_selector.vue';
import { REF_TYPE_BRANCHES, REF_TYPE_TAGS } from '~/ref/constants';
@@ -28,7 +29,7 @@ const commitsPath = 'some/path';
function factory() {
mock = new MockAdapter(axios);
jest.spyOn(axios, 'get');
- mock.onGet().reply(200, chartData);
+ mock.onGet().reply(HTTP_STATUS_OK, chartData);
store = createStore();
wrapper = mountExtended(Component, {
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
index a4a78fc12ee..b2ebdf2f53c 100644
--- a/spec/frontend/contributors/store/actions_spec.js
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -4,7 +4,7 @@ import * as actions from '~/contributors/stores/actions';
import * as types from '~/contributors/stores/mutation_types';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/flash.js');
@@ -23,7 +23,7 @@ describe('Contributors store actions', () => {
});
it('should commit SET_CHART_DATA with received response', () => {
- mock.onGet().reply(200, chartData);
+ mock.onGet().reply(HTTP_STATUS_OK, chartData);
return testAction(
actions.fetchChartData,
diff --git a/spec/frontend/deploy_keys/components/app_spec.js b/spec/frontend/deploy_keys/components/app_spec.js
index 79a9aaa9184..d11ecf95de6 100644
--- a/spec/frontend/deploy_keys/components/app_spec.js
+++ b/spec/frontend/deploy_keys/components/app_spec.js
@@ -8,6 +8,7 @@ import deployKeysApp from '~/deploy_keys/components/app.vue';
import ConfirmModal from '~/deploy_keys/components/confirm_modal.vue';
import eventHub from '~/deploy_keys/eventhub';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
const TEST_ENDPOINT = `${TEST_HOST}/dummy/`;
@@ -28,7 +29,7 @@ describe('Deploy keys app component', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(TEST_ENDPOINT).reply(200, data);
+ mock.onGet(TEST_ENDPOINT).reply(HTTP_STATUS_OK, data);
});
afterEach(() => {
@@ -67,7 +68,7 @@ describe('Deploy keys app component', () => {
});
it('does not render key panels when keys object is empty', () => {
- mock.onGet(TEST_ENDPOINT).reply(200, []);
+ mock.onGet(TEST_ENDPOINT).reply(HTTP_STATUS_OK, []);
return mountComponent().then(() => {
expect(findKeyPanels().length).toBe(0);
diff --git a/spec/frontend/design_management/components/design_overlay_spec.js b/spec/frontend/design_management/components/design_overlay_spec.js
index 169f2dbdccb..2807fe7727f 100644
--- a/spec/frontend/design_management/components/design_overlay_spec.js
+++ b/spec/frontend/design_management/components/design_overlay_spec.js
@@ -1,32 +1,54 @@
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import DesignOverlay from '~/design_management/components/design_overlay.vue';
-import { ACTIVE_DISCUSSION_SOURCE_TYPES } from '~/design_management/constants';
-import updateActiveDiscussion from '~/design_management/graphql/mutations/update_active_discussion.mutation.graphql';
+import { resolvers } from '~/design_management/graphql';
+import activeDiscussionQuery from '~/design_management/graphql/queries/active_discussion.query.graphql';
import notes from '../mock_data/notes';
-const mutate = jest.fn(() => Promise.resolve());
+Vue.use(VueApollo);
describe('Design overlay component', () => {
let wrapper;
+ let apolloProvider;
const mockDimensions = { width: 100, height: 100 };
- const findAllNotes = () => wrapper.findAll('.js-image-badge');
- const findCommentBadge = () => wrapper.find('.comment-indicator');
+ const findOverlay = () => wrapper.find('[data-testid="design-overlay"]');
+ const findAllNotes = () => wrapper.findAll('[data-testid="note-pin"]');
+ const findCommentBadge = () => wrapper.find('[data-testid="comment-badge"]');
const findBadgeAtIndex = (noteIndex) => findAllNotes().at(noteIndex);
const findFirstBadge = () => findBadgeAtIndex(0);
const findSecondBadge = () => findBadgeAtIndex(1);
const clickAndDragBadge = async (elem, fromPoint, toPoint) => {
- elem.trigger('mousedown', { clientX: fromPoint.x, clientY: fromPoint.y });
+ elem.vm.$emit(
+ 'mousedown',
+ new MouseEvent('click', { clientX: fromPoint.x, clientY: fromPoint.y }),
+ );
+ findOverlay().trigger('mousemove', { clientX: toPoint.x, clientY: toPoint.y });
await nextTick();
- elem.trigger('mousemove', { clientX: toPoint.x, clientY: toPoint.y });
+ elem.vm.$emit('mouseup', new MouseEvent('click', { clientX: toPoint.x, clientY: toPoint.y }));
await nextTick();
};
function createComponent(props = {}, data = {}) {
- wrapper = mount(DesignOverlay, {
+ apolloProvider = createMockApollo([], resolvers);
+ apolloProvider.clients.defaultClient.writeQuery({
+ query: activeDiscussionQuery,
+ data: {
+ activeDiscussion: {
+ __typename: 'ActiveDiscussion',
+ id: null,
+ source: null,
+ },
+ },
+ });
+
+ wrapper = shallowMount(DesignOverlay, {
+ apolloProvider,
propsData: {
dimensions: mockDimensions,
position: {
@@ -45,14 +67,13 @@ describe('Design overlay component', () => {
...data,
};
},
- mocks: {
- $apollo: {
- mutate,
- },
- },
});
}
+ afterEach(() => {
+ apolloProvider = null;
+ });
+
it('should have correct inline style', () => {
createComponent();
@@ -96,12 +117,15 @@ describe('Design overlay component', () => {
});
it('should have set the correct position for each note badge', () => {
- expect(findFirstBadge().attributes().style).toBe('left: 10px; top: 15px;');
- expect(findSecondBadge().attributes().style).toBe('left: 50px; top: 50px;');
+ expect(findFirstBadge().props('position')).toEqual({
+ left: '10px',
+ top: '15px',
+ });
+ expect(findSecondBadge().props('position')).toEqual({ left: '50px', top: '50px' });
});
it('should apply resolved class to the resolved note pin', () => {
- expect(findSecondBadge().classes()).toContain('resolved');
+ expect(findSecondBadge().props('isResolved')).toBe(true);
});
describe('when no discussion is active', () => {
@@ -116,33 +140,37 @@ describe('Design overlay component', () => {
it.each([notes[0].discussion.notes.nodes[1], notes[0].discussion.notes.nodes[0]])(
'should not apply inactive class to the pin for the active discussion',
async (note) => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- activeDiscussion: {
- id: note.id,
- source: 'discussion',
+ apolloProvider.clients.defaultClient.writeQuery({
+ query: activeDiscussionQuery,
+ data: {
+ activeDiscussion: {
+ __typename: 'ActiveDiscussion',
+ id: note.id,
+ source: 'discussion',
+ },
},
});
await nextTick();
- expect(findBadgeAtIndex(0).classes()).not.toContain('inactive');
+ expect(findBadgeAtIndex(0).props('isInactive')).toBe(false);
},
);
it('should apply inactive class to all pins besides the active one', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- activeDiscussion: {
- id: notes[0].id,
- source: 'discussion',
+ apolloProvider.clients.defaultClient.writeQuery({
+ query: activeDiscussionQuery,
+ data: {
+ activeDiscussion: {
+ __typename: 'ActiveDiscussion',
+ id: notes[0].id,
+ source: 'discussion',
+ },
},
});
await nextTick();
- expect(findSecondBadge().classes()).toContain('inactive');
- expect(findFirstBadge().classes()).not.toContain('inactive');
+ expect(findSecondBadge().props('isInactive')).toBe(true);
+ expect(findFirstBadge().props('isInactive')).toBe(false);
});
});
});
@@ -156,7 +184,7 @@ describe('Design overlay component', () => {
},
});
- expect(findFirstBadge().attributes().style).toBe('left: 40px; top: 60px;');
+ expect(findFirstBadge().props('position')).toEqual({ left: '40px', top: '60px' });
wrapper.setProps({
dimensions: {
@@ -166,10 +194,10 @@ describe('Design overlay component', () => {
});
await nextTick();
- expect(findFirstBadge().attributes().style).toBe('left: 20px; top: 30px;');
+ expect(findFirstBadge().props('position')).toEqual({ left: '20px', top: '30px' });
});
- it('should call an update active discussion mutation when clicking a note without moving it', async () => {
+ it('should update active discussion when clicking a note without moving it', async () => {
createComponent({
notes,
dimensions: {
@@ -178,61 +206,36 @@ describe('Design overlay component', () => {
},
});
+ expect(findFirstBadge().props('isInactive')).toBe(null);
+
const note = notes[0];
const { position } = note;
- const mutationVariables = {
- mutation: updateActiveDiscussion,
- variables: {
- id: note.id,
- source: ACTIVE_DISCUSSION_SOURCE_TYPES.pin,
- },
- };
- findFirstBadge().trigger('mousedown', { clientX: position.x, clientY: position.y });
+ findFirstBadge().vm.$emit(
+ 'mousedown',
+ new MouseEvent('click', { clientX: position.x, clientY: position.y }),
+ );
await nextTick();
- findFirstBadge().trigger('mouseup', { clientX: position.x, clientY: position.y });
- expect(mutate).toHaveBeenCalledWith(mutationVariables);
+ findFirstBadge().vm.$emit(
+ 'mouseup',
+ new MouseEvent('click', { clientX: position.x, clientY: position.y }),
+ );
+ await waitForPromises();
+ expect(findFirstBadge().props('isInactive')).toBe(false);
});
});
describe('when moving notes', () => {
- it('should update badge style when note is being moved', async () => {
- createComponent({
- notes,
- });
-
- const { position } = notes[0];
-
- await clickAndDragBadge(findFirstBadge(), { x: position.x, y: position.y }, { x: 20, y: 20 });
- expect(findFirstBadge().attributes().style).toBe('left: 20px; top: 20px;');
- });
-
it('should emit `moveNote` event when note-moving action ends', async () => {
createComponent({ notes });
const note = notes[0];
const { position } = note;
const newCoordinates = { x: 20, y: 20 };
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- movingNoteNewPosition: {
- ...position,
- ...newCoordinates,
- },
- movingNoteStartPosition: {
- noteId: notes[0].id,
- discussionId: notes[0].discussion.id,
- ...position,
- },
- });
-
const badge = findFirstBadge();
await clickAndDragBadge(badge, { x: position.x, y: position.y }, newCoordinates);
- badge.trigger('mouseup');
- await nextTick();
expect(wrapper.emitted('moveNote')).toEqual([
[
{
@@ -266,9 +269,10 @@ describe('Design overlay component', () => {
const badge = findAllNotes().at(0);
await clickAndDragBadge(badge, { ...mockNoteCoordinates }, { x: 20, y: 20 });
// note position should not change after a click-and-drag attempt
- expect(findFirstBadge().attributes().style).toContain(
- `left: ${mockNoteCoordinates.x}px; top: ${mockNoteCoordinates.y}px;`,
- );
+ expect(findFirstBadge().props('position')).toEqual({
+ left: `${mockNoteCoordinates.x}px`,
+ top: `${mockNoteCoordinates.y}px`,
+ });
});
});
});
@@ -282,27 +286,10 @@ describe('Design overlay component', () => {
});
expect(findCommentBadge().exists()).toBe(true);
- expect(findCommentBadge().attributes().style).toBe('left: 10px; top: 15px;');
+ expect(findCommentBadge().props('position')).toEqual({ left: '10px', top: '15px' });
});
describe('when moving the comment badge', () => {
- it('should update badge style to reflect new position', async () => {
- const { position } = notes[0];
-
- createComponent({
- currentCommentForm: {
- ...position,
- },
- });
-
- await clickAndDragBadge(
- findCommentBadge(),
- { x: position.x, y: position.y },
- { x: 20, y: 20 },
- );
- expect(findCommentBadge().attributes().style).toBe('left: 20px; top: 20px;');
- });
-
it('should update badge style when note-moving action ends', async () => {
const { position } = notes[0];
createComponent({
@@ -315,7 +302,7 @@ describe('Design overlay component', () => {
const toPoint = { x: 20, y: 20 };
await clickAndDragBadge(commentBadge, { x: position.x, y: position.y }, toPoint);
- commentBadge.trigger('mouseup');
+ commentBadge.vm.$emit('mouseup', new MouseEvent('click'));
// simulates the currentCommentForm being updated in index.vue component, and
// propagated back down to this prop
wrapper.setProps({
@@ -323,110 +310,50 @@ describe('Design overlay component', () => {
});
await nextTick();
- expect(commentBadge.attributes().style).toBe('left: 20px; top: 20px;');
+ expect(commentBadge.props('position')).toEqual({ left: '20px', top: '20px' });
});
- it.each`
- element | getElementFunc | event
- ${'overlay'} | ${() => wrapper} | ${'mouseleave'}
- ${'comment badge'} | ${findCommentBadge} | ${'mouseup'}
- `(
- 'should emit `openCommentForm` event when $event fired on $element element',
- async ({ getElementFunc, event }) => {
- createComponent({
- notes,
- currentCommentForm: {
- ...notes[0].position,
- },
- });
-
- const newCoordinates = { x: 20, y: 20 };
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- movingNoteStartPosition: {
- ...notes[0].position,
- },
- movingNoteNewPosition: {
- ...notes[0].position,
- ...newCoordinates,
- },
- });
-
- getElementFunc().trigger(event);
- await nextTick();
- expect(wrapper.emitted('openCommentForm')).toEqual([[newCoordinates]]);
- },
- );
- });
- });
-
- describe('getMovingNotePositionDelta', () => {
- it('should calculate delta correctly from state', () => {
- createComponent();
+ it('should emit `openCommentForm` event when mouseleave fired on overlay element', async () => {
+ const { position } = notes[0];
+ createComponent({
+ notes,
+ currentCommentForm: {
+ ...position,
+ },
+ });
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- movingNoteStartPosition: {
- clientX: 10,
- clientY: 20,
- },
- });
+ const newCoordinates = { x: 20, y: 20 };
- const mockMouseEvent = {
- clientX: 30,
- clientY: 10,
- };
+ await clickAndDragBadge(
+ findCommentBadge(),
+ { x: position.x, y: position.y },
+ newCoordinates,
+ );
- expect(wrapper.vm.getMovingNotePositionDelta(mockMouseEvent)).toEqual({
- deltaX: 20,
- deltaY: -10,
+ wrapper.trigger('mouseleave');
+ await nextTick();
+ expect(wrapper.emitted('openCommentForm')).toEqual([[newCoordinates]]);
});
- });
- });
- describe('isPositionInOverlay', () => {
- createComponent({ dimensions: mockDimensions });
-
- it.each`
- test | coordinates | expectedResult
- ${'within overlay bounds'} | ${{ x: 50, y: 50 }} | ${true}
- ${'outside overlay bounds'} | ${{ x: 101, y: 101 }} | ${false}
- `('returns [$expectedResult] when position is $test', ({ coordinates, expectedResult }) => {
- const position = { ...mockDimensions, ...coordinates };
+ it('should emit `openCommentForm` event when mouseup fired on comment badge element', async () => {
+ const { position } = notes[0];
+ createComponent({
+ notes,
+ currentCommentForm: {
+ ...position,
+ },
+ });
- expect(wrapper.vm.isPositionInOverlay(position)).toBe(expectedResult);
- });
- });
+ const newCoordinates = { x: 20, y: 20 };
- describe('getNoteRelativePosition', () => {
- it('calculates position correctly', () => {
- createComponent({ dimensions: mockDimensions });
- const position = { x: 50, y: 50, width: 200, height: 200 };
+ await clickAndDragBadge(
+ findCommentBadge(),
+ { x: position.x, y: position.y },
+ newCoordinates,
+ );
- expect(wrapper.vm.getNoteRelativePosition(position)).toEqual({ left: 25, top: 25 });
+ expect(wrapper.emitted('openCommentForm')).toEqual([[newCoordinates]]);
+ });
});
});
-
- describe('canMoveNote', () => {
- it.each`
- repositionNotePermission | canMoveNoteResult
- ${true} | ${true}
- ${false} | ${false}
- ${undefined} | ${false}
- `(
- 'returns [$canMoveNoteResult] when [repositionNote permission] is [$repositionNotePermission]',
- ({ repositionNotePermission, canMoveNoteResult }) => {
- createComponent();
-
- const note = {
- userPermissions: {
- repositionNote: repositionNotePermission,
- },
- };
- expect(wrapper.vm.canMoveNote(note)).toBe(canMoveNoteResult);
- },
- );
- });
});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index c8be0bedb4c..513e67ea247 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -17,6 +17,7 @@ import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vu
import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import * as urlUtils from '~/lib/utils/url_utility';
import { stubPerformanceWebAPI } from 'helpers/performance';
import createDiffsStore from '../create_diffs_store';
@@ -87,7 +88,7 @@ describe('diffs/components/app', () => {
};
window.mrTabs.expandViewContainer = jest.fn();
mock = new MockAdapter(axios);
- mock.onGet(TEST_ENDPOINT).reply(200, {});
+ mock.onGet(TEST_ENDPOINT).reply(HTTP_STATUS_OK, {});
});
afterEach(() => {
diff --git a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
index 0058712ace6..c42ac28c498 100644
--- a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
+++ b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
@@ -14,7 +14,7 @@ import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_edito
import SourceEditor from '~/editor/source_editor';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import syntaxHighlight from '~/syntax_highlight';
import { spyOnApi } from './helpers';
@@ -155,7 +155,7 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
describe('onBeforeUnuse', () => {
beforeEach(async () => {
- mockAxios.onPost().reply(200, { body: responseData });
+ mockAxios.onPost().reply(HTTP_STATUS_OK, { body: responseData });
await togglePreview();
});
afterEach(() => {
@@ -261,7 +261,9 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
let previewMarkdownSpy;
beforeEach(() => {
- previewMarkdownSpy = jest.fn().mockImplementation(() => [200, { body: responseData }]);
+ previewMarkdownSpy = jest
+ .fn()
+ .mockImplementation(() => [HTTP_STATUS_OK, { body: responseData }]);
mockAxios.onPost(previewMarkdownPath).replyOnce((req) => previewMarkdownSpy(req));
});
@@ -322,7 +324,7 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
describe('togglePreview', () => {
beforeEach(() => {
- mockAxios.onPost().reply(200, { body: responseData });
+ mockAxios.onPost().reply(HTTP_STATUS_OK, { body: responseData });
});
it('toggles the condition to toggle preview/hide actions in the context menu', () => {
diff --git a/spec/frontend/emoji/awards_app/store/actions_spec.js b/spec/frontend/emoji/awards_app/store/actions_spec.js
index fa83a0f6ce2..3e9b49707ed 100644
--- a/spec/frontend/emoji/awards_app/store/actions_spec.js
+++ b/spec/frontend/emoji/awards_app/store/actions_spec.js
@@ -42,10 +42,10 @@ describe('Awards app actions', () => {
window.gon = { relative_url_root: relativeRootUrl };
mock
.onGet(`${relativeRootUrl || ''}/awards`, { params: { per_page: 100, page: '1' } })
- .reply(200, ['thumbsup'], { 'x-next-page': '2' });
+ .reply(HTTP_STATUS_OK, ['thumbsup'], { 'x-next-page': '2' });
mock
.onGet(`${relativeRootUrl || ''}/awards`, { params: { per_page: 100, page: '2' } })
- .reply(200, ['thumbsdown']);
+ .reply(HTTP_STATUS_OK, ['thumbsdown']);
});
it('commits FETCH_AWARDS_SUCCESS', async () => {
@@ -116,7 +116,7 @@ describe('Awards app actions', () => {
describe('adding new award', () => {
describe('success', () => {
beforeEach(() => {
- mock.onPost(`${relativeRootUrl || ''}/awards`).reply(200, { id: 1 });
+ mock.onPost(`${relativeRootUrl || ''}/awards`).reply(HTTP_STATUS_OK, { id: 1 });
});
it('adds an optimistic award, removes it, and then commits ADD_NEW_AWARD', async () => {
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
index 6e6fc6f3351..3ec43010d80 100644
--- a/spec/frontend/error_tracking/store/actions_spec.js
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -4,7 +4,7 @@ import * as actions from '~/error_tracking/store/actions';
import * as types from '~/error_tracking/store/mutation_types';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/flash.js');
@@ -30,7 +30,7 @@ describe('Sentry common store actions', () => {
describe('updateStatus', () => {
it('should handle successful status update', async () => {
- mock.onPut().reply(200, {});
+ mock.onPut().reply(HTTP_STATUS_OK, {});
await testAction(
actions.updateStatus,
params,
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index 27ab314564f..383d8aaeb20 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -4,7 +4,11 @@ import * as actions from '~/error_tracking/store/details/actions';
import * as types from '~/error_tracking/store/details/mutation_types';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_BAD_REQUEST,
+ HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_OK,
+} from '~/lib/utils/http_status';
import Poll from '~/lib/utils/poll';
let mockedAdapter;
@@ -31,7 +35,7 @@ describe('Sentry error details store actions', () => {
const endpoint = '123/stacktrace';
it('should commit SET_ERROR with received response', () => {
const payload = { error: [1, 2, 3] };
- mockedAdapter.onGet().reply(200, payload);
+ mockedAdapter.onGet().reply(HTTP_STATUS_OK, payload);
return testAction(
actions.startPollingStacktrace,
{ endpoint },
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
index b71cdf78207..14e1f34bc59 100644
--- a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -49,7 +49,7 @@ describe('New Environments Dropdown', () => {
describe('with empty results', () => {
let item;
beforeEach(async () => {
- axiosMock.onGet(TEST_HOST).reply(200, []);
+ axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, []);
wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
await axios.waitForAll();
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 9092d73571b..c9f033bffbb 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -22,6 +22,7 @@ import ModelManager from '~/ide/lib/common/model_manager';
import service from '~/ide/services';
import { createStoreOptions } from '~/ide/stores';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ContentViewer from '~/vue_shared/components/content_viewer/content_viewer.vue';
import SourceEditorInstance from '~/editor/source_editor_instance';
import { file } from '../helpers';
@@ -265,7 +266,7 @@ describe('RepoEditor', () => {
mock = new MockAdapter(axios);
- mock.onPost(/(.*)\/preview_markdown/).reply(200, {
+ mock.onPost(/(.*)\/preview_markdown/).reply(HTTP_STATUS_OK, {
body: `<p>${dummyFile.text.content}</p>`,
});
});
diff --git a/spec/frontend/ide/stores/modules/commit/actions_spec.js b/spec/frontend/ide/stores/modules/commit/actions_spec.js
index 8601e13f7ca..4068a9d0919 100644
--- a/spec/frontend/ide/stores/modules/commit/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/actions_spec.js
@@ -14,6 +14,7 @@ import {
COMMIT_TO_NEW_BRANCH,
} from '~/ide/stores/modules/commit/constants';
import * as mutationTypes from '~/ide/stores/modules/commit/mutation_types';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -48,7 +49,7 @@ describe('IDE commit module actions', () => {
mock
.onGet('/api/v1/projects/abcproject/repository/branches/main')
- .reply(200, { commit: COMMIT_RESPONSE });
+ .reply(HTTP_STATUS_OK, { commit: COMMIT_RESPONSE });
});
afterEach(() => {
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
index 3928f209e96..9616733f052 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
@@ -6,7 +6,7 @@ import { PENDING, RUNNING, STOPPING, STOPPED } from '~/ide/stores/modules/termin
import * as messages from '~/ide/stores/modules/terminal/messages';
import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/flash');
@@ -146,7 +146,7 @@ describe('IDE store terminal session controls actions', () => {
});
it('dispatches success on success', () => {
- mock.onGet(state.session.showPath).reply(200, TEST_SESSION);
+ mock.onGet(state.session.showPath).reply(HTTP_STATUS_OK, TEST_SESSION);
return testAction(
actions.fetchSessionStatus,
diff --git a/spec/frontend/import_entities/import_groups/services/status_poller_spec.js b/spec/frontend/import_entities/import_groups/services/status_poller_spec.js
index 13d2a95ca14..4a1b85d24e3 100644
--- a/spec/frontend/import_entities/import_groups/services/status_poller_spec.js
+++ b/spec/frontend/import_entities/import_groups/services/status_poller_spec.js
@@ -4,6 +4,7 @@ import { createAlert } from '~/flash';
import { STATUSES } from '~/import_entities/constants';
import { StatusPoller } from '~/import_entities/import_groups/services/status_poller';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import Poll from '~/lib/utils/poll';
jest.mock('visibilityjs');
@@ -21,7 +22,7 @@ describe('Bulk import status poller', () => {
beforeEach(() => {
mockAdapter = new MockAdapter(axios);
- mockAdapter.onGet(FAKE_POLL_PATH).reply(200, {});
+ mockAdapter.onGet(FAKE_POLL_PATH).reply(HTTP_STATUS_OK, {});
updateImportStatus = jest.fn();
poller = new StatusPoller({ updateImportStatus, pollPath: FAKE_POLL_PATH });
});
diff --git a/spec/frontend/issues/issue_spec.js b/spec/frontend/issues/issue_spec.js
index 089ea8dbbad..f04e766a78c 100644
--- a/spec/frontend/issues/issue_spec.js
+++ b/spec/frontend/issues/issue_spec.js
@@ -4,6 +4,7 @@ import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { EVENT_ISSUABLE_VUE_APP_CHANGE } from '~/issuable/constants';
import Issue from '~/issues/issue';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('Issue', () => {
let testContext;
@@ -11,7 +12,7 @@ describe('Issue', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(/(.*)\/related_branches$/).reply(200, {});
+ mock.onGet(/(.*)\/related_branches$/).reply(HTTP_STATUS_OK, {});
testContext = {};
testContext.issue = new Issue();
diff --git a/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js
index 8413b8463c1..010c719bd84 100644
--- a/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import mockData from 'test_fixtures/issues/related_merge_requests.json';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import RelatedMergeRequests from '~/issues/related_merge_requests/components/related_merge_requests.vue';
import createStore from '~/issues/related_merge_requests/store/index';
import RelatedIssuableItem from '~/issuable/components/related_issuable_item.vue';
@@ -18,7 +19,7 @@ describe('RelatedMergeRequests', () => {
document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(mockData);
mock = new MockAdapter(axios);
- mock.onGet(`${API_ENDPOINT}?per_page=100`).reply(200, mockData, { 'x-total': 2 });
+ mock.onGet(`${API_ENDPOINT}?per_page=100`).reply(HTTP_STATUS_OK, mockData, { 'x-total': 2 });
wrapper = shallowMount(RelatedMergeRequests, {
store: createStore(),
diff --git a/spec/frontend/labels/components/promote_label_modal_spec.js b/spec/frontend/labels/components/promote_label_modal_spec.js
index 9dd6f1ca64c..97913c20229 100644
--- a/spec/frontend/labels/components/promote_label_modal_spec.js
+++ b/spec/frontend/labels/components/promote_label_modal_spec.js
@@ -6,7 +6,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import { stubComponent } from 'helpers/stub_component';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PromoteLabelModal from '~/labels/components/promote_label_modal.vue';
import eventHub from '~/labels/event_hub';
@@ -67,7 +67,7 @@ describe('Promote label modal', () => {
it('redirects when a label is promoted', async () => {
const responseURL = `${TEST_HOST}/dummy/endpoint`;
- axiosMock.onPost(labelMockData.url).reply(200, { url: responseURL });
+ axiosMock.onPost(labelMockData.url).reply(HTTP_STATUS_OK, { url: responseURL });
wrapper.findComponent(GlModal).vm.$emit('primary');
diff --git a/spec/frontend/lib/utils/icon_utils_spec.js b/spec/frontend/lib/utils/icon_utils_spec.js
index f32c21fbb4f..59839862504 100644
--- a/spec/frontend/lib/utils/icon_utils_spec.js
+++ b/spec/frontend/lib/utils/icon_utils_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { clearSvgIconPathContentCache, getSvgIconPathContent } from '~/lib/utils/icon_utils';
describe('Icon utils', () => {
@@ -31,7 +31,7 @@ describe('Icon utils', () => {
describe('when the icons can be loaded', () => {
beforeEach(() => {
- axiosMock.onGet(gon.sprite_icons).reply(200, mockIcons);
+ axiosMock.onGet(gon.sprite_icons).reply(HTTP_STATUS_OK, mockIcons);
});
it('extracts svg icon path content from sprite icons', () => {
@@ -55,7 +55,7 @@ describe('Icon utils', () => {
.onGet(gon.sprite_icons)
.replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR)
.onGet(gon.sprite_icons)
- .reply(200, mockIcons);
+ .reply(HTTP_STATUS_OK, mockIcons);
});
it('returns null', () => {
diff --git a/spec/frontend/lib/utils/poll_spec.js b/spec/frontend/lib/utils/poll_spec.js
index 0e6c08bf033..63eeb54e850 100644
--- a/spec/frontend/lib/utils/poll_spec.js
+++ b/spec/frontend/lib/utils/poll_spec.js
@@ -1,5 +1,9 @@
import waitForPromises from 'helpers/wait_for_promises';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR, successCodes } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+ HTTP_STATUS_OK,
+ successCodes,
+} from '~/lib/utils/http_status';
import Poll from '~/lib/utils/poll';
describe('Poll', () => {
@@ -51,7 +55,7 @@ describe('Poll', () => {
});
it('calls the success callback when no header for interval is provided', () => {
- mockServiceCall({ status: 200 });
+ mockServiceCall({ status: HTTP_STATUS_OK });
setup();
return waitForAllCallsToFinish(1, () => {
@@ -82,7 +86,7 @@ describe('Poll', () => {
});
it('should call the success callback when the interval header is -1', () => {
- mockServiceCall({ status: 200, headers: { 'poll-interval': -1 } });
+ mockServiceCall({ status: HTTP_STATUS_OK, headers: { 'poll-interval': -1 } });
return setup().then(() => {
expect(callbacks.success).toHaveBeenCalled();
expect(callbacks.error).not.toHaveBeenCalled();
@@ -118,7 +122,7 @@ describe('Poll', () => {
describe('with delayed initial request', () => {
it('delays the first request', async () => {
- mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
+ mockServiceCall({ status: HTTP_STATUS_OK, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
resource: service,
@@ -147,7 +151,7 @@ describe('Poll', () => {
describe('stop', () => {
it('stops polling when method is called', () => {
- mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
+ mockServiceCall({ status: HTTP_STATUS_OK, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
resource: service,
@@ -173,7 +177,7 @@ describe('Poll', () => {
describe('enable', () => {
it('should enable polling upon a response', () => {
- mockServiceCall({ status: 200 });
+ mockServiceCall({ status: HTTP_STATUS_OK });
const Polling = new Poll({
resource: service,
method: 'fetch',
@@ -183,7 +187,7 @@ describe('Poll', () => {
Polling.enable({
data: { page: 4 },
- response: { status: 200, headers: { 'poll-interval': 1 } },
+ response: { status: HTTP_STATUS_OK, headers: { 'poll-interval': 1 } },
});
return waitForAllCallsToFinish(1, () => {
@@ -198,7 +202,7 @@ describe('Poll', () => {
describe('restart', () => {
it('should restart polling when its called', () => {
- mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
+ mockServiceCall({ status: HTTP_STATUS_OK, headers: { 'poll-interval': 1 } });
const Polling = new Poll({
resource: service,
diff --git a/spec/frontend/lib/utils/select2_utils_spec.js b/spec/frontend/lib/utils/select2_utils_spec.js
index 6d601dd5ad1..f8a87316655 100644
--- a/spec/frontend/lib/utils/select2_utils_spec.js
+++ b/spec/frontend/lib/utils/select2_utils_spec.js
@@ -3,6 +3,7 @@ import $ from 'jquery';
import { setHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { select2AxiosTransport } from '~/lib/utils/select2_utils';
import 'select2/select2';
@@ -85,7 +86,7 @@ describe('lib/utils/select2_utils', () => {
`(
'passes results and pagination to results callback, with headers=$headers',
async ({ headers, pagination }) => {
- mock.onGet(TEST_URL).reply(200, TEST_DATA, headers);
+ mock.onGet(TEST_URL).reply(HTTP_STATUS_OK, TEST_DATA, headers);
await setupSelect2AndSearch();
diff --git a/spec/frontend/merge_requests/components/compare_dropdown_spec.js b/spec/frontend/merge_requests/components/compare_dropdown_spec.js
index fecb82aaa5e..ab5c315816c 100644
--- a/spec/frontend/merge_requests/components/compare_dropdown_spec.js
+++ b/spec/frontend/merge_requests/components/compare_dropdown_spec.js
@@ -3,6 +3,7 @@ import { GlCollapsibleListbox } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import CompareDropdown from '~/merge_requests/components/compare_dropdown.vue';
let wrapper;
@@ -27,7 +28,7 @@ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
describe('Merge requests compare dropdown component', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet('/gitlab-org/gitlab/target_projects').reply(200, [
+ mock.onGet('/gitlab-org/gitlab/target_projects').reply(HTTP_STATUS_OK, [
{
id: 10,
name: 'Gitlab Test',
diff --git a/spec/frontend/mr_notes/stores/actions_spec.js b/spec/frontend/mr_notes/stores/actions_spec.js
index 03d87ef43bf..ae30ed1f0b3 100644
--- a/spec/frontend/mr_notes/stores/actions_spec.js
+++ b/spec/frontend/mr_notes/stores/actions_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { createStore } from '~/mr_notes/stores';
describe('MR Notes Mutator Actions', () => {
@@ -31,7 +31,7 @@ describe('MR Notes Mutator Actions', () => {
mock = new MockAdapter(axios);
- mock.onGet(metadata).reply(200, mrMetadata);
+ mock.onGet(metadata).reply(HTTP_STATUS_OK, mrMetadata);
});
afterEach(() => {
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index ed9fc47540d..ed1ced1b3d1 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -7,6 +7,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import createEventHub from '~/helpers/event_hub_factory';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import DiscussionFilter from '~/notes/components/discussion_filter.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import Tracking from '~/tracking';
@@ -74,7 +75,7 @@ describe('DiscussionFilter component', () => {
// We are mocking the discussions retrieval,
// as it doesn't matter for our tests here
- mock.onGet(DISCUSSION_PATH).reply(200, '');
+ mock.onGet(DISCUSSION_PATH).reply(HTTP_STATUS_OK, '');
window.mrTabs = undefined;
wrapper = mountComponent();
jest.spyOn(Tracking, 'event');
diff --git a/spec/frontend/notes/components/note_awards_list_spec.js b/spec/frontend/notes/components/note_awards_list_spec.js
index 9fc89ffa473..89ac0216f41 100644
--- a/spec/frontend/notes/components/note_awards_list_spec.js
+++ b/spec/frontend/notes/components/note_awards_list_spec.js
@@ -2,6 +2,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import awardsNote from '~/notes/components/note_awards_list.vue';
import createStore from '~/notes/stores';
import { noteableDataMock, notesDataMock } from '../mock_data';
@@ -17,7 +18,7 @@ describe('note_awards_list component', () => {
beforeEach(() => {
mock = new AxiosMockAdapter(axios);
- mock.onPost(toggleAwardPath).reply(200, '');
+ mock.onPost(toggleAwardPath).reply(HTTP_STATUS_OK, '');
const Component = Vue.extend(awardsNote);
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index 0c3d0da4f0f..b08a22f8674 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -7,6 +7,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import DraftNote from '~/batch_comments/components/draft_note.vue';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { getLocationHash } from '~/lib/utils/url_utility';
import * as urlUtility from '~/lib/utils/url_utility';
import CommentForm from '~/notes/components/comment_form.vue';
@@ -286,7 +287,7 @@ describe('note_app', () => {
describe('emoji awards', () => {
beforeEach(() => {
- axiosMock.onAny().reply(200, []);
+ axiosMock.onAny().reply(HTTP_STATUS_OK, []);
wrapper = mountComponent();
return waitForPromises();
});
diff --git a/spec/frontend/pages/import/history/components/import_error_details_spec.js b/spec/frontend/pages/import/history/components/import_error_details_spec.js
index 82a3e11186e..628ee8d7999 100644
--- a/spec/frontend/pages/import/history/components/import_error_details_spec.js
+++ b/spec/frontend/pages/import/history/components/import_error_details_spec.js
@@ -2,6 +2,7 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ImportErrorDetails from '~/pages/import/history/components/import_error_details.vue';
describe('ImportErrorDetails', () => {
@@ -46,7 +47,7 @@ describe('ImportErrorDetails', () => {
it('renders import_error if it is available', async () => {
const FAKE_IMPORT_ERROR = 'IMPORT ERROR';
- mock.onGet(API_URL).reply(200, { import_error: FAKE_IMPORT_ERROR });
+ mock.onGet(API_URL).reply(HTTP_STATUS_OK, { import_error: FAKE_IMPORT_ERROR });
createComponent();
await axios.waitForAll();
@@ -55,7 +56,7 @@ describe('ImportErrorDetails', () => {
});
it('renders default text if error is not available', async () => {
- mock.onGet(API_URL).reply(200, { import_error: null });
+ mock.onGet(API_URL).reply(HTTP_STATUS_OK, { import_error: null });
createComponent();
await axios.waitForAll();
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index 5f5303a5339..99bccd21656 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -10,6 +10,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
import getUserCallouts from '~/graphql_shared/queries/get_user_callouts.query.graphql';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import {
PIPELINES_DETAIL_LINK_DURATION,
PIPELINES_DETAIL_LINKS_TOTAL,
@@ -546,7 +547,7 @@ describe('Pipeline graph wrapper', () => {
describe('with duration and no error', () => {
beforeEach(async () => {
mock = new MockAdapter(axios);
- mock.onPost(metricsPath).reply(200, {});
+ mock.onPost(metricsPath).reply(HTTP_STATUS_OK, {});
jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
return [{ duration }];
diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js
index 50e305dea7f..e49d92188ed 100644
--- a/spec/frontend/projects/commit_box/info/load_branches_spec.js
+++ b/spec/frontend/projects/commit_box/info/load_branches_spec.js
@@ -2,7 +2,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { setHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { loadBranches } from '~/projects/commit_box/info/load_branches';
const mockCommitPath = '/commit/abcd/branches';
@@ -23,7 +23,7 @@ describe('~/projects/commit_box/info/load_branches', () => {
</div>`);
mock = new MockAdapter(axios);
- mock.onGet(mockCommitPath).reply(200, mockBranchesRes);
+ mock.onGet(mockCommitPath).reply(HTTP_STATUS_OK, mockBranchesRes);
});
it('loads and renders branches info', async () => {
@@ -46,7 +46,7 @@ describe('~/projects/commit_box/info/load_branches', () => {
beforeEach(() => {
mock
.onGet(mockCommitPath)
- .reply(200, '<a onload="alert(\'xss!\');" href="/-/commits/main">main</a>');
+ .reply(HTTP_STATUS_OK, '<a onload="alert(\'xss!\');" href="/-/commits/main">main</a>');
});
it('displays sanitized html', async () => {
diff --git a/spec/frontend/prometheus_metrics/custom_metrics_spec.js b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
index a079b0b97fd..3852f2678b7 100644
--- a/spec/frontend/prometheus_metrics/custom_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
@@ -1,6 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PANEL_STATE from '~/prometheus_metrics/constants';
import CustomMetrics from '~/prometheus_metrics/custom_metrics';
import { metrics1 as metrics } from './mock_data';
@@ -13,7 +14,7 @@ describe('PrometheusMetrics', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(customMetricsEndpoint).reply(200, {
+ mock.onGet(customMetricsEndpoint).reply(HTTP_STATUS_OK, {
metrics,
});
loadHTMLFixture(FIXTURE);
diff --git a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
index a65cbe1a47a..45654d6a2eb 100644
--- a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
@@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import PANEL_STATE from '~/prometheus_metrics/constants';
import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
import { metrics2 as metrics, missingVarMetrics } from './mock_data';
@@ -116,7 +117,7 @@ describe('PrometheusMetrics', () => {
let mock;
function mockSuccess() {
- mock.onGet(prometheusMetrics.activeMetricsEndpoint).reply(200, {
+ mock.onGet(prometheusMetrics.activeMetricsEndpoint).reply(HTTP_STATUS_OK, {
data: metrics,
success: true,
});
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index d7d8e634569..bd61e4537f9 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -5,6 +5,7 @@ import Vuex from 'vuex';
import { nextTick } from 'vue';
import { GlDatepicker, GlFormCheckbox } from '@gitlab/ui';
import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { convertOneReleaseGraphQLResponse } from '~/releases/util';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
@@ -95,7 +96,7 @@ describe('Release edit/new component', () => {
mock = new MockAdapter(axios);
gon.api_version = 'v4';
- mock.onGet('/api/v4/projects/8/milestones').reply(200, originalMilestones);
+ mock.onGet('/api/v4/projects/8/milestones').reply(HTTP_STATUS_OK, originalMilestones);
release = convertOneReleaseGraphQLResponse(originalOneReleaseForEditingQueryResponse).data;
});
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index c1309539b6d..a2e86c86add 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -1,6 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { createMockClient } from 'helpers/mock_apollo_helper';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { resolveCommit, fetchLogsTree } from '~/repository/log_tree';
import commitsQuery from '~/repository/queries/commits.query.graphql';
import projectPathQuery from '~/repository/queries/project_path.query.graphql';
@@ -47,7 +48,7 @@ describe('fetchLogsTree', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(/(.*)/).reply(200, mockData, {});
+ mock.onGet(/(.*)/).reply(HTTP_STATUS_OK, mockData, {});
jest.spyOn(axios, 'get');
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
index ad5fca760f4..82c4a37ccc9 100644
--- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -6,7 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { joinPaths } from '~/lib/utils/url_utility';
import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
import SourceEditor from '~/vue_shared/components/source_editor.vue';
@@ -58,7 +58,7 @@ describe('Snippet Blob Edit component', () => {
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
- axiosMock.onGet(TEST_FULL_PATH).reply(200, TEST_CONTENT);
+ axiosMock.onGet(TEST_FULL_PATH).reply(HTTP_STATUS_OK, TEST_CONTENT);
});
afterEach(() => {
@@ -104,7 +104,7 @@ describe('Snippet Blob Edit component', () => {
describe('with unloaded blob and JSON content', () => {
beforeEach(() => {
- axiosMock.onGet(TEST_FULL_PATH).reply(200, TEST_JSON_CONTENT);
+ axiosMock.onGet(TEST_FULL_PATH).reply(HTTP_STATUS_OK, TEST_JSON_CONTENT);
createComponent();
});
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
index 9231e38ea90..6fb3436100f 100644
--- a/spec/frontend/users_select/test_helper.js
+++ b/spec/frontend/users_select/test_helper.js
@@ -4,6 +4,7 @@ import usersFixture from 'test_fixtures/autocomplete/users.json';
import { getFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import UsersSelect from '~/users_select';
// fixtures -------------------------------------------------------------------
@@ -31,7 +32,7 @@ export const createTestContext = ({ fixturePath }) => {
document.body.appendChild(rootEl);
mock = new MockAdapter(axios);
- mock.onGet('/-/autocomplete/users.json').reply(200, cloneDeep(getUsersFixture()));
+ mock.onGet('/-/autocomplete/users.json').reply(HTTP_STATUS_OK, cloneDeep(getUsersFixture()));
};
const teardown = () => {
diff --git a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
index f993b82026d..52e2393bf05 100644
--- a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
@@ -5,7 +5,7 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { TEST_HOST as FAKE_ENDPOINT } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ArtifactsListApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
import { getStoreConfig } from '~/vue_merge_request_widget/stores/artifacts_list';
import { artifacts } from '../mock_data';
@@ -79,10 +79,10 @@ describe('Merge Requests Artifacts list app', () => {
describe('with results', () => {
beforeEach(() => {
createComponent();
- mock.onGet(FAKE_ENDPOINT).reply(200, artifacts, {});
+ mock.onGet(FAKE_ENDPOINT).reply(HTTP_STATUS_OK, artifacts, {});
store.dispatch('receiveArtifactsSuccess', {
data: artifacts,
- status: 200,
+ status: HTTP_STATUS_OK,
});
return nextTick();
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
index c3f6331e560..13beb43e10b 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
@@ -2,6 +2,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ArtifactsApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
import DeploymentList from '~/vue_merge_request_widget/components/deployment/deployment_list.vue';
import MrWidgetPipeline from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
@@ -25,7 +26,7 @@ describe('MrWidgetPipelineContainer', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet().reply(200, {});
+ mock.onGet().reply(HTTP_STATUS_OK, {});
});
afterEach(() => {
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
index 144e176b0f0..9c348fc9d58 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
@@ -4,6 +4,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import MRWidgetPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
import { SUCCESS } from '~/vue_merge_request_widget/constants';
@@ -39,7 +40,7 @@ describe('MRWidgetPipeline', () => {
const findMonitoringPipelineMessage = () => wrapper.findByTestId('monitoring-pipeline-message');
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const mockArtifactsRequest = () => new MockAdapter(axios).onGet().reply(200, []);
+ const mockArtifactsRequest = () => new MockAdapter(axios).onGet().reply(HTTP_STATUS_OK, []);
const createWrapper = (props = {}, mountFn = shallowMount) => {
wrapper = extendedWrapper(
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
index 170c28c3624..c0cd5503cfc 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
@@ -8,6 +8,7 @@ import ActionButtons from '~/vue_merge_request_widget/components/widget/action_b
import Widget from '~/vue_merge_request_widget/components/widget/widget.vue';
import WidgetContentRow from '~/vue_merge_request_widget/components/widget/widget_content_row.vue';
import * as logger from '~/lib/logger';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/vue_merge_request_widget/components/extensions/telemetry', () => ({
createTelemetryHub: jest.fn().mockReturnValue({
@@ -33,7 +34,7 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
isCollapsible: false,
loadingText: 'Loading widget',
widgetName: 'WidgetTest',
- fetchCollapsedData: () => Promise.resolve({ headers: {}, status: 200 }),
+ fetchCollapsedData: () => Promise.resolve({ headers: {}, status: HTTP_STATUS_OK }),
value: {
collapsed: null,
expanded: null,
@@ -57,7 +58,7 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
it('fetches collapsed', async () => {
const fetchCollapsedData = jest
.fn()
- .mockReturnValue(Promise.resolve({ headers: {}, status: 200, data: {} }));
+ .mockReturnValue(Promise.resolve({ headers: {}, status: HTTP_STATUS_OK, data: {} }));
createComponent({ propsData: { fetchCollapsedData } });
await waitForPromises();
@@ -84,7 +85,7 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
it('displays loading icon until request is made and then displays status icon when the request is complete', async () => {
const fetchCollapsedData = jest
.fn()
- .mockReturnValue(Promise.resolve({ headers: {}, status: 200, data: {} }));
+ .mockReturnValue(Promise.resolve({ headers: {}, status: HTTP_STATUS_OK, data: {} }));
createComponent({ propsData: { fetchCollapsedData, statusIconName: 'warning' } });
@@ -123,15 +124,23 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
describe('fetch', () => {
it('sets the data.collapsed property after a successfull call - multiPolling: false', async () => {
- const mockData = { headers: {}, status: 200, data: { vulnerabilities: [] } };
+ const mockData = { headers: {}, status: HTTP_STATUS_OK, data: { vulnerabilities: [] } };
createComponent({ propsData: { fetchCollapsedData: async () => mockData } });
await waitForPromises();
expect(wrapper.emitted('input')[0][0]).toEqual({ collapsed: mockData.data, expanded: null });
});
it('sets the data.collapsed property after a successfull call - multiPolling: true', async () => {
- const mockData1 = { headers: {}, status: 200, data: { vulnerabilities: [{ vuln: 1 }] } };
- const mockData2 = { headers: {}, status: 200, data: { vulnerabilities: [{ vuln: 2 }] } };
+ const mockData1 = {
+ headers: {},
+ status: HTTP_STATUS_OK,
+ data: { vulnerabilities: [{ vuln: 1 }] },
+ };
+ const mockData2 = {
+ headers: {},
+ status: HTTP_STATUS_OK,
+ data: { vulnerabilities: [{ vuln: 2 }] },
+ };
createComponent({
propsData: {
@@ -295,13 +304,13 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
it('fetches expanded data when clicked for the first time', async () => {
const mockDataCollapsed = {
headers: {},
- status: 200,
+ status: HTTP_STATUS_OK,
data: { vulnerabilities: [{ vuln: 1 }] },
};
const mockDataExpanded = {
headers: {},
- status: 200,
+ status: HTTP_STATUS_OK,
data: { vulnerabilities: [{ vuln: 2 }] },
};
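(Editor's note: in widget_spec.js above the constant also appears outside mock adapters — the fetchCollapsedData prop resolves to an axios-like { headers, status, data } object, so its numeric 200 is swapped for HTTP_STATUS_OK as well. A hedged sketch of that stub shape follows; the jest.fn wrapper is illustrative, the payload mirrors the hunk.)

// Sketch only: response-shaped stub as used by the widget specs above.
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';

const fetchCollapsedData = jest
  .fn()
  .mockResolvedValue({ headers: {}, status: HTTP_STATUS_OK, data: { vulnerabilities: [] } });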
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index 824d0d012fb..f37276ad594 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -75,8 +75,10 @@ describe('MrWidgetOptions', () => {
gon.features = { asyncMrWidget: true };
mock = new MockAdapter(axios);
- mock.onGet(mockData.merge_request_widget_path).reply(() => [200, { ...mockData }]);
- mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, { ...mockData }]);
+ mock.onGet(mockData.merge_request_widget_path).reply(() => [HTTP_STATUS_OK, { ...mockData }]);
+ mock
+ .onGet(mockData.merge_request_cached_widget_path)
+ .reply(() => [HTTP_STATUS_OK, { ...mockData }]);
});
afterEach(() => {
@@ -806,8 +808,8 @@ describe('MrWidgetOptions', () => {
// Override top-level mocked requests, which always use a fresh copy of
// mockData, which always includes the full pipeline object.
- mock.onGet(mockData.merge_request_widget_path).reply(() => [200, mrData]);
- mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, mrData]);
+ mock.onGet(mockData.merge_request_widget_path).reply(() => [HTTP_STATUS_OK, mrData]);
+ mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [HTTP_STATUS_OK, mrData]);
return createComponent(mrData, {
apolloMock: [
@@ -987,12 +989,12 @@ describe('MrWidgetOptions', () => {
() =>
Promise.resolve({
headers: { 'poll-interval': 0 },
- status: 200,
+ status: HTTP_STATUS_OK,
data: { reports: 'parsed' },
}),
() =>
Promise.resolve({
- status: 200,
+ status: HTTP_STATUS_OK,
data: { reports: 'parsed' },
}),
]),
@@ -1014,7 +1016,7 @@ describe('MrWidgetOptions', () => {
}),
() =>
Promise.resolve({
- status: 200,
+ status: HTTP_STATUS_OK,
data: { reports: 'parsed' },
}),
]),
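(Editor's note: mr_widget_options_spec.js above uses axios-mock-adapter's callback form, which must return a [status, body] tuple, so HTTP_STATUS_OK replaces the tuple's first element rather than the first argument. A hedged sketch follows; the endpoint and body are hypothetical.)

// Sketch only: callback reply returning a [status, body] tuple.
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';

const mock = new MockAdapter(axios);
const widgetData = { state: 'merged' };

// A fresh copy of widgetData is returned on every request.
mock.onGet('/fake/widget_path').reply(() => [HTTP_STATUS_OK, { ...widgetData }]);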
diff --git a/spec/frontend/vue_merge_request_widget/test_extensions.js b/spec/frontend/vue_merge_request_widget/test_extensions.js
index 1977f550577..ff2dc38442f 100644
--- a/spec/frontend/vue_merge_request_widget/test_extensions.js
+++ b/spec/frontend/vue_merge_request_widget/test_extensions.js
@@ -1,3 +1,4 @@
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { EXTENSION_ICONS } from '~/vue_merge_request_widget/constants';
export const workingExtension = (shouldCollapse = true) => ({
@@ -120,7 +121,7 @@ export const pollingFullDataExtension = {
return Promise.resolve([
{
headers: { 'poll-interval': 0 },
- status: 200,
+ status: HTTP_STATUS_OK,
data: {
id: 1,
text: 'Hello world',
diff --git a/spec/frontend/vue_shared/components/entity_select/group_select_spec.js b/spec/frontend/vue_shared/components/entity_select/group_select_spec.js
index b9479b0d51d..83560e367ea 100644
--- a/spec/frontend/vue_shared/components/entity_select/group_select_spec.js
+++ b/spec/frontend/vue_shared/components/entity_select/group_select_spec.js
@@ -2,7 +2,7 @@ import { GlCollapsibleListbox } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import GroupSelect from '~/vue_shared/components/entity_select/group_select.vue';
import EntitySelect from '~/vue_shared/components/entity_select/entity_select.vue';
import {
@@ -96,7 +96,7 @@ describe('GroupSelect', () => {
describe('with an initial selection', () => {
it("fetches the initially selected value's name", async () => {
- mock.onGet(groupEndpoint).reply(200, groupMock);
+ mock.onGet(groupEndpoint).reply(HTTP_STATUS_OK, groupMock);
createComponent({ props: { initialSelection: groupMock.id } });
await waitForPromises();
@@ -107,7 +107,7 @@ describe('GroupSelect', () => {
it('show an error if fetching the individual group fails', async () => {
mock
.onGet('/api/undefined/groups.json')
- .reply(200, [{ full_name: 'notTheSelectedGroup', id: '2' }]);
+ .reply(HTTP_STATUS_OK, [{ full_name: 'notTheSelectedGroup', id: '2' }]);
mock.onGet(groupEndpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent({ props: { initialSelection: groupMock.id } });
diff --git a/spec/frontend/vue_shared/components/entity_select/project_select_spec.js b/spec/frontend/vue_shared/components/entity_select/project_select_spec.js
index 04a57fa475c..57dce032d30 100644
--- a/spec/frontend/vue_shared/components/entity_select/project_select_spec.js
+++ b/spec/frontend/vue_shared/components/entity_select/project_select_spec.js
@@ -220,7 +220,7 @@ describe('ProjectSelect', () => {
it('show an error if fetching the individual project fails', async () => {
mock
.onGet(groupProjectEndpoint)
- .reply(200, [{ full_name: 'notTheSelectedProject', id: '2' }]);
+ .reply(HTTP_STATUS_OK, [{ full_name: 'notTheSelectedProject', id: '2' }]);
mock.onGet(projectEndpoint).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent({ props: { initialSelection: projectMock.id } });
diff --git a/spec/frontend/vue_shared/components/notes/system_note_spec.js b/spec/frontend/vue_shared/components/notes/system_note_spec.js
index 559f9bcb1a8..bcfd7a8ec70 100644
--- a/spec/frontend/vue_shared/components/notes/system_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/system_note_spec.js
@@ -4,6 +4,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import createStore from '~/notes/stores';
import IssueSystemNote from '~/vue_shared/components/notes/system_note.vue';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
jest.mock('~/behaviors/markdown/render_gfm');
@@ -85,7 +86,7 @@ describe('system note component', () => {
it('renders outdated code lines', async () => {
mock
.onGet('/outdated_line_change_path')
- .reply(200, [
+ .reply(HTTP_STATUS_OK, [
{ rich_text: 'console.log', type: 'new', line_code: '123', old_line: null, new_line: 1 },
]);
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 2a0d2089fe3..18afe049149 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlModal, GlPopover } from '@gitlab/ui';
+import { GlButton, GlLink, GlModal, GlPopover } from '@gitlab/ui';
import { nextTick } from 'vue';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
@@ -147,6 +147,11 @@ describe('Web IDE link component', () => {
const findForkConfirmModal = () => wrapper.findComponent(ConfirmForkModal);
const findUserCalloutDismisser = () => wrapper.findComponent(UserCalloutDismisser);
const findNewWebIdeCalloutPopover = () => wrapper.findComponent(GlPopover);
+ const findTryItOutLink = () =>
+ wrapper
+ .findAllComponents(GlLink)
+ .filter((link) => link.text().includes('Try it out'))
+ .at(0);
it.each([
{
@@ -516,6 +521,12 @@ describe('Web IDE link component', () => {
expect(dismiss).toHaveBeenCalled();
});
+ it('dismisses the callout when try it now link is clicked', () => {
+ findTryItOutLink().vm.$emit('click');
+
+ expect(dismiss).toHaveBeenCalled();
+ });
+
it('dismisses the callout when action button is clicked', () => {
findActionsButton().vm.$emit('actionClicked');
diff --git a/spec/frontend/work_items/components/notes/system_note_spec.js b/spec/frontend/work_items/components/notes/system_note_spec.js
index 3e3b8bf65b2..fd5f373d076 100644
--- a/spec/frontend/work_items/components/notes/system_note_spec.js
+++ b/spec/frontend/work_items/components/notes/system_note_spec.js
@@ -6,6 +6,7 @@ import { renderGFM } from '~/behaviors/markdown/render_gfm';
import WorkItemSystemNote from '~/work_items/components/notes/system_note.vue';
import NoteHeader from '~/notes/components/note_header.vue';
import axios from '~/lib/utils/axios_utils';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/behaviors/markdown/render_gfm');
@@ -95,7 +96,7 @@ describe('system note component', () => {
it.skip('renders outdated code lines', async () => {
mock
.onGet('/outdated_line_change_path')
- .reply(200, [
+ .reply(HTTP_STATUS_OK, [
{ rich_text: 'console.log', type: 'new', line_code: '123', old_line: null, new_line: 1 },
]);
diff --git a/spec/graphql/resolvers/group_releases_resolver_spec.rb b/spec/graphql/resolvers/group_releases_resolver_spec.rb
new file mode 100644
index 00000000000..73386870030
--- /dev/null
+++ b/spec/graphql/resolvers/group_releases_resolver_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::GroupReleasesResolver, feature_category: :release_orchestration do
+ include GraphqlHelpers
+
+ let_it_be(:today) { Time.now }
+ let_it_be(:yesterday) { today - 1.day }
+ let_it_be(:tomorrow) { today + 1.day }
+
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :private, namespace: group) }
+ let_it_be(:release_v1) do
+ create(:release, project: project, tag: 'v1.0.0', released_at: yesterday, created_at: tomorrow)
+ end
+
+ let_it_be(:release_v2) do
+ create(:release, project: project, tag: 'v2.0.0', released_at: today, created_at: yesterday)
+ end
+
+ let_it_be(:release_v3) do
+ create(:release, project: project, tag: 'v3.0.0', released_at: tomorrow, created_at: today)
+ end
+
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:public_user) { create(:user) }
+
+ let(:args) { { sort: :released_at_desc } }
+ let(:all_releases) { [release_v1, release_v2, release_v3] }
+
+ before do
+ group.add_member(developer, :owner)
+ project.add_developer(developer)
+ end
+
+ describe '#resolve' do
+ it_behaves_like 'releases and group releases resolver'
+ end
+
+ private
+
+ def resolve_releases
+ context = { current_user: current_user }
+ resolve(described_class, obj: group, args: args, ctx: context, arg_style: :internal)
+ end
+end
diff --git a/spec/graphql/resolvers/releases_resolver_spec.rb b/spec/graphql/resolvers/releases_resolver_spec.rb
index 6ba9a6c33a1..58f6257c946 100644
--- a/spec/graphql/resolvers/releases_resolver_spec.rb
+++ b/spec/graphql/resolvers/releases_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::ReleasesResolver do
+RSpec.describe Resolvers::ReleasesResolver, feature_category: :release_orchestration do
include GraphqlHelpers
let_it_be(:today) { Time.now }
@@ -24,60 +24,28 @@ RSpec.describe Resolvers::ReleasesResolver do
end
describe '#resolve' do
- context 'when the user does not have access to the project' do
- let(:current_user) { public_user }
+ it_behaves_like 'releases and group releases resolver'
- it 'returns an empty array' do
- expect(resolve_releases).to be_empty
- end
- end
-
- context "when the user has full access to the project's releases" do
+ describe 'when order_by is created_at' do
let(:current_user) { developer }
- it 'returns all releases associated to the project' do
- expect(resolve_releases).to match_array(all_releases)
- end
-
- describe 'sorting behavior' do
- context 'with sort: :released_at_desc' do
- let(:args) { { sort: :released_at_desc } }
-
- it 'returns the releases ordered by released_at in descending order' do
- expect(resolve_releases.to_a)
- .to match_array(all_releases)
- .and be_sorted(:released_at, :desc)
- end
- end
-
- context 'with sort: :released_at_asc' do
- let(:args) { { sort: :released_at_asc } }
-
- it 'returns the releases ordered by released_at in ascending order' do
- expect(resolve_releases.to_a)
- .to match_array(all_releases)
- .and be_sorted(:released_at, :asc)
- end
- end
-
- context 'with sort: :created_desc' do
- let(:args) { { sort: :created_desc } }
+ context 'with sort: desc' do
+ let(:args) { { sort: :created_desc } }
- it 'returns the releases ordered by created_at in descending order' do
- expect(resolve_releases.to_a)
- .to match_array(all_releases)
- .and be_sorted(:created_at, :desc)
- end
+ it 'returns the releases ordered by created_at in descending order' do
+ expect(resolve_releases.to_a)
+ .to match_array(all_releases)
+ .and be_sorted(:created_at, :desc)
end
+ end
- context 'with sort: :created_asc' do
- let(:args) { { sort: :created_asc } }
+ context 'with sort: asc' do
+ let(:args) { { sort: :created_asc } }
- it 'returns the releases ordered by created_at in ascending order' do
- expect(resolve_releases.to_a)
- .to match_array(all_releases)
- .and be_sorted(:created_at, :asc)
- end
+ it 'returns the releases ordered by created_at in ascending order' do
+ expect(resolve_releases.to_a)
+ .to match_array(all_releases)
+ .and be_sorted(:created_at, :asc)
end
end
end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 0b65778ce90..6820cf2738e 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe GitlabSchema.types['Group'] do
dependency_proxy_image_prefix dependency_proxy_image_ttl_policy
shared_runners_setting timelogs organization_state_counts organizations
contact_state_counts contacts work_item_types
- recent_issue_boards ci_variables
+ recent_issue_boards ci_variables releases
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -70,6 +70,13 @@ RSpec.describe GitlabSchema.types['Group'] do
it { is_expected.to have_graphql_resolver(Resolvers::Crm::OrganizationStateCountsResolver) }
end
+ describe 'releases field' do
+ subject { described_class.fields['releases'] }
+
+ it { is_expected.to have_graphql_type(Types::ReleaseType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::GroupReleasesResolver) }
+ end
+
it_behaves_like 'a GraphQL type with labels' do
let(:labels_resolver_arguments) { [:search_term, :includeAncestorGroups, :includeDescendantGroups, :onlyGroupLabels] }
end
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index f7a76035203..af2da8f20c0 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :not_owned do
+RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :shared do
describe '.notify' do
it 'pushes the jid to the named queue' do
key = described_class.new.key
diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb
index 8362c07baca..53c2db8a826 100644
--- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb
+++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::QuickActions::CommandDefinition do
- subject { described_class.new(:command) }
+ let(:mock_command) { :command }
+
+ subject { described_class.new(mock_command) }
describe "#all_names" do
context "when the command has aliases" do
@@ -74,7 +76,7 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do
context "when the command has types" do
before do
- subject.types = [Issue, Commit]
+ subject.types = [Issue, Commit, WorkItem]
end
context "when the command target type is allowed" do
@@ -82,6 +84,26 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do
opts[:quick_action_target] = Issue.new
expect(subject.available?(opts)).to be true
end
+
+ context 'when the command target type is Work Item' do
+ context 'when the command is not allowed' do
+ it "returns false" do
+ opts[:quick_action_target] = build(:work_item)
+ expect(subject.available?(opts)).to be false
+ end
+ end
+
+ context 'when the command is allowed' do
+ it "returns true" do
+ allow_next_instance_of(WorkItem) do |work_item|
+ allow(work_item).to receive(:supported_quick_action_commands).and_return([mock_command])
+ end
+
+ opts[:quick_action_target] = build(:work_item)
+ expect(subject.available?(opts)).to be true
+ end
+ end
+ end
end
context "when the command target type is not allowed" do
@@ -99,6 +121,9 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do
opts[:quick_action_target] = MergeRequest.new
expect(subject.available?(opts)).to be true
+
+ opts[:quick_action_target] = build(:work_item)
+ expect(subject.available?(opts)).to be true
end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 1a255379098..4c665d6ea28 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -422,7 +422,6 @@ RSpec.describe Group, feature_category: :subgroups do
before do
group.save!
- group.reload
end
it { expect(group.traversal_ids).to eq [group.id] }
@@ -434,7 +433,6 @@ RSpec.describe Group, feature_category: :subgroups do
before do
group.save!
- reload_models(parent, group)
end
it { expect(parent.traversal_ids).to eq [parent.id] }
@@ -449,7 +447,6 @@ RSpec.describe Group, feature_category: :subgroups do
before do
parent.update!(parent: new_grandparent)
group.save!
- reload_models(parent, group)
end
it 'avoid traversal_ids race condition' do
@@ -487,7 +484,6 @@ RSpec.describe Group, feature_category: :subgroups do
new_parent.update!(parent: new_grandparent)
group.save!
- reload_models(parent, group, new_grandparent, new_parent)
end
it 'avoids traversal_ids race condition' do
@@ -509,14 +505,13 @@ RSpec.describe Group, feature_category: :subgroups do
end
context 'within the same hierarchy' do
- let!(:root) { create(:group).reload }
+ let!(:root) { create(:group) }
let!(:old_parent) { create(:group, parent: root) }
let!(:new_parent) { create(:group, parent: root) }
context 'with FOR NO KEY UPDATE lock' do
before do
subject
- reload_models(old_parent, new_parent, group)
end
it 'updates traversal_ids' do
@@ -537,7 +532,6 @@ RSpec.describe Group, feature_category: :subgroups do
before do
subject
- reload_models(old_parent, new_parent, group)
end
it 'updates traversal_ids' do
@@ -567,7 +561,6 @@ RSpec.describe Group, feature_category: :subgroups do
before do
subject
- reload_models(old_parent, new_parent, group)
end
it 'updates traversal_ids' do
@@ -589,7 +582,6 @@ RSpec.describe Group, feature_category: :subgroups do
before do
subject
- reload_models(old_parent, new_parent, group)
end
it 'updates traversal_ids' do
@@ -614,11 +606,12 @@ RSpec.describe Group, feature_category: :subgroups do
before do
parent_group.update!(parent: new_grandparent)
+ reload_models(parent_group, group)
end
it 'updates traversal_ids for all descendants' do
- expect(parent_group.reload.traversal_ids).to eq [new_grandparent.id, parent_group.id]
- expect(group.reload.traversal_ids).to eq [new_grandparent.id, parent_group.id, group.id]
+ expect(parent_group.traversal_ids).to eq [new_grandparent.id, parent_group.id]
+ expect(group.traversal_ids).to eq [new_grandparent.id, parent_group.id, group.id]
end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 852df3b8a90..f1d5bf4a2b0 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -404,6 +404,62 @@ RSpec.describe Namespace, feature_category: :subgroups do
it { is_expected.to include_module(Namespaces::Traversal::LinearScopes) }
end
+ describe '#traversal_ids' do
+ let(:namespace) { build(:group) }
+
+ context 'when namespace not persisted' do
+ it 'returns []' do
+ expect(namespace.traversal_ids).to eq []
+ end
+ end
+
+ context 'when namespace just saved' do
+ let(:namespace) { build(:group) }
+
+ before do
+ namespace.save!
+ end
+
+ it 'returns value that matches database' do
+ expect(namespace.traversal_ids).to eq Namespace.find(namespace.id).traversal_ids
+ end
+ end
+
+ context 'when namespace loaded from database' do
+ before do
+ namespace.save!
+ namespace.reload
+ end
+
+ it 'returns database value' do
+ expect(namespace.traversal_ids).to eq Namespace.find(namespace.id).traversal_ids
+ end
+ end
+
+ context 'when made a child group' do
+ let!(:namespace) { create(:group) }
+ let!(:parent_namespace) { create(:group, children: [namespace]) }
+
+ it 'returns database value' do
+ expect(namespace.traversal_ids).to eq [parent_namespace.id, namespace.id]
+ end
+ end
+
+ context 'when root_ancestor changes' do
+ let(:old_root) { create(:group) }
+ let(:namespace) { create(:group, parent: old_root) }
+ let(:new_root) { create(:group) }
+
+ it 'resets root_ancestor memo' do
+ expect(namespace.root_ancestor).to eq old_root
+
+ namespace.update!(parent: new_root)
+
+ expect(namespace.root_ancestor).to eq new_root
+ end
+ end
+ end
+
context 'traversal scopes' do
context 'recursive' do
before do
@@ -479,36 +535,60 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
context 'traversal_ids on create' do
- shared_examples 'default traversal_ids' do
- let!(:namespace) { create(:group) }
- let!(:child_namespace) { create(:group, parent: namespace) }
+ let(:parent) { create(:group) }
+ let(:child) { create(:group, parent: parent) }
- it { expect(namespace.reload.traversal_ids).to eq [namespace.id] }
- it { expect(child_namespace.reload.traversal_ids).to eq [namespace.id, child_namespace.id] }
- it { expect(namespace.sync_events.count).to eq 1 }
- it { expect(child_namespace.sync_events.count).to eq 1 }
- end
+ it { expect(parent.traversal_ids).to eq [parent.id] }
+ it { expect(child.traversal_ids).to eq [parent.id, child.id] }
+ it { expect(parent.sync_events.count).to eq 1 }
+ it { expect(child.sync_events.count).to eq 1 }
+
+ context 'when set_traversal_ids_on_save feature flag is disabled' do
+ before do
+ stub_feature_flags(set_traversal_ids_on_save: false)
+ end
- it_behaves_like 'default traversal_ids'
+ it 'only sets traversal_ids on reload' do
+ expect { parent.reload }.to change(parent, :traversal_ids).from([]).to([parent.id])
+ expect { child.reload }.to change(child, :traversal_ids).from([]).to([parent.id, child.id])
+ end
+ end
end
context 'traversal_ids on update' do
- let!(:namespace1) { create(:group) }
- let!(:namespace2) { create(:group) }
+ let(:namespace1) { create(:group) }
+ let(:namespace2) { create(:group) }
- it 'updates the traversal_ids when the parent_id is changed' do
- expect do
- namespace1.update!(parent: namespace2)
- end.to change { namespace1.reload.traversal_ids }.from([namespace1.id]).to([namespace2.id, namespace1.id])
+ context 'when parent_id is changed' do
+ subject { namespace1.update!(parent: namespace2) }
+
+ it 'sets the traversal_ids attribute' do
+ expect { subject }.to change { namespace1.traversal_ids }.from([namespace1.id]).to([namespace2.id, namespace1.id])
+ end
+
+ context 'when set_traversal_ids_on_save feature flag is disabled' do
+ before do
+ stub_feature_flags(set_traversal_ids_on_save: false)
+ end
+
+ it 'sets traversal_ids after reload' do
+ subject
+
+ expect { namespace1.reload }.to change(namespace1, :traversal_ids).from([]).to([namespace2.id, namespace1.id])
+ end
+ end
end
it 'creates a Namespaces::SyncEvent using triggers' do
Namespaces::SyncEvent.delete_all
- namespace1.update!(parent: namespace2)
- expect(namespace1.reload.sync_events.count).to eq(1)
+
+ expect { namespace1.update!(parent: namespace2) }.to change(namespace1.sync_events, :count).by(1)
end
it 'creates sync_events using database trigger on the table' do
+ namespace1.save!
+ namespace2.save!
+
expect { Group.update_all(traversal_ids: [-1]) }.to change(Namespaces::SyncEvent, :count).by(2)
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 71c889260a1..52155c3b936 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -57,6 +57,18 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
end
end
+ describe '#supports_assignee?' do
+ let(:work_item) { build(:work_item, :task) }
+
+ before do
+ allow(work_item.work_item_type).to receive(:supports_assignee?).and_return(false)
+ end
+
+ it 'delegates the call to its work item type' do
+ expect(work_item.supports_assignee?).to be(false)
+ end
+ end
+
describe '#supported_quick_action_commands' do
let(:work_item) { build(:work_item, :task) }
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index 1ada783385e..cf2e5d25756 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -126,4 +126,24 @@ RSpec.describe WorkItems::Type do
expect(work_item_type.name).to eq('label😸')
end
end
+
+ describe '#supports_assignee?' do
+ subject(:supports_assignee) { build(:work_item_type, :task).supports_assignee? }
+
+ context 'when the assignees widget is supported' do
+ before do
+ stub_const('::WorkItems::Type::WIDGETS_FOR_TYPE', { task: [::WorkItems::Widgets::Assignees] })
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the assignees widget is not supported' do
+ before do
+ stub_const('::WorkItems::Type::WIDGETS_FOR_TYPE', { task: [] })
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/requests/api/graphql/group/group_releases_spec.rb b/spec/requests/api/graphql/group/group_releases_spec.rb
new file mode 100644
index 00000000000..931e7c19c18
--- /dev/null
+++ b/spec/requests/api/graphql/group/group_releases_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.group(fullPath).releases()', feature_category: :release_orchestration do
+ include GraphqlHelpers
+
+ include_context 'when releases and group releases shared context'
+
+ let(:resource_type) { :group }
+ let(:resource) { group }
+
+ describe "ensures that the correct data is returned based on the project's visibility and the user's access level" do
+ context 'when the group is private' do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :repository, :private, group: group) }
+ let_it_be(:release) { create(:release, :with_evidence, project: project) }
+
+ before_all do
+ group.add_guest(guest)
+ group.add_reporter(reporter)
+ group.add_developer(developer)
+ end
+
+ context 'when the user is not logged in' do
+ let(:current_user) { stranger }
+
+ it_behaves_like 'no access to any release data'
+ end
+
+ context 'when the user has Guest permissions' do
+ let(:current_user) { guest }
+
+ it_behaves_like 'no access to any repository-related fields'
+ end
+
+ context 'when the user has Reporter permissions' do
+ let(:current_user) { reporter }
+
+ it_behaves_like 'full access to all repository-related fields'
+ it_behaves_like 'no access to editUrl'
+ end
+
+ context 'when the user has Developer permissions' do
+ let(:current_user) { developer }
+
+ it_behaves_like 'full access to all repository-related fields'
+ it_behaves_like 'access to editUrl'
+ end
+ end
+
+ context 'when the group is public' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
+ let_it_be(:release) { create(:release, :with_evidence, project: project) }
+
+ before_all do
+ group.add_guest(guest)
+ group.add_reporter(reporter)
+ group.add_developer(developer)
+ end
+
+ context 'when the user is not logged in' do
+ let(:current_user) { stranger }
+
+ it_behaves_like 'no access to any release data'
+ end
+
+ context 'when the user has Guest permissions' do
+ let(:current_user) { guest }
+
+ it_behaves_like 'full access to all repository-related fields'
+ it_behaves_like 'no access to editUrl'
+ end
+
+ context 'when the user has Reporter permissions' do
+ let(:current_user) { reporter }
+
+ it_behaves_like 'full access to all repository-related fields'
+ it_behaves_like 'no access to editUrl'
+ end
+
+ context 'when the user has Developer permissions' do
+ let(:current_user) { developer }
+
+ it_behaves_like 'full access to all repository-related fields'
+ it_behaves_like 'access to editUrl'
+ end
+ end
+ end
+
+ describe 'sorting and pagination' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+ let(:current_user) { developer }
+
+ let(:data_path) { [:group, :releases] }
+
+ before_all do
+ group.add_developer(developer)
+ end
+
+ def pagination_query(params)
+ graphql_query_for(
+ :group,
+ { full_path: group.full_path },
+ query_graphql_field(:releases, params, "#{page_info} nodes { tagName }")
+ )
+ end
+
+ def pagination_results_data(nodes)
+ nodes.pluck('tagName')
+ end
+
+ context 'when sorting by released_at' do
+ let_it_be(:release5) { create(:release, project: project, tag: 'v5.5.0', released_at: 3.days.from_now) }
+ let_it_be(:release1) { create(:release, project: project, tag: 'v5.1.0', released_at: 3.days.ago) }
+ let_it_be(:release4) { create(:release, project: project, tag: 'v5.4.0', released_at: 2.days.from_now) }
+ let_it_be(:release2) { create(:release, project: project, tag: 'v5.2.0', released_at: 2.days.ago) }
+ let_it_be(:release3) { create(:release, project: project, tag: 'v5.3.0', released_at: 1.day.ago) }
+
+ context 'when ascending' do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :RELEASED_AT_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [release1.tag, release2.tag, release3.tag, release4.tag, release5.tag] }
+ end
+ end
+
+ context 'when descending' do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :RELEASED_AT_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [release5.tag, release4.tag, release3.tag, release2.tag, release1.tag] }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/bulk_update_spec.rb b/spec/requests/api/graphql/mutations/issues/bulk_update_spec.rb
new file mode 100644
index 00000000000..cb1819653c2
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/bulk_update_spec.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Bulk update issues', feature_category: :team_planning do
+ include GraphqlHelpers
+
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:group) { create(:group).tap { |group| group.add_developer(developer) } }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:updatable_issues, reload: true) { create_list(:issue, 3, project: project) }
+
+ let(:parent) { project }
+ let(:max_issues) { Mutations::Issues::BulkUpdate::MAX_ISSUES }
+ let(:mutation) { graphql_mutation(:issues_bulk_update, base_arguments.merge(additional_arguments)) }
+ let(:mutation_response) { graphql_mutation_response(:issues_bulk_update) }
+ let(:current_user) { developer }
+ let(:base_arguments) { { parent_id: parent.to_gid.to_s, ids: updatable_issues.map { |i| i.to_gid.to_s } } }
+ let(:additional_arguments) { { assignee_ids: [current_user.to_gid.to_s] } }
+
+ context 'when the `bulk_update_issues_mutation` feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_update_issues_mutation: false)
+ end
+
+ it 'returns a resource not available error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including(
+ 'message' => '`bulk_update_issues_mutation` feature flag is disabled.'
+ )
+ )
+ end
+ end
+
+ context 'when user cannot update all issues' do
+ let_it_be(:forbidden_issue) { create(:issue) }
+
+ it 'updates only issues that the user can update' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ updatable_issues.each(&:reset)
+ forbidden_issue.reset
+ end.to change { updatable_issues.flat_map(&:assignee_ids) }.from([]).to([current_user.id] * 3).and(
+ not_change(forbidden_issue, :assignee_ids).from([])
+ )
+
+ expect(mutation_response).to include(
+ 'updatedIssueCount' => updatable_issues.count
+ )
+ end
+ end
+
+ context 'when user can update all issues' do
+ it 'updates all issues' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ updatable_issues.each(&:reload)
+ end.to change { updatable_issues.flat_map(&:assignee_ids) }.from([]).to([current_user.id] * 3)
+
+ expect(mutation_response).to include(
+ 'updatedIssueCount' => updatable_issues.count
+ )
+ end
+
+ context 'when current user cannot read the specified project' do
+ let_it_be(:parent) { create(:project, :private) }
+
+ it 'returns a resource not found error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including(
+ 'message' => "The resource that you are attempting to access does not exist or you don't have " \
+ 'permission to perform this action'
+ )
+ )
+ end
+ end
+
+ context 'when scoping to a parent group' do
+ let(:parent) { group }
+
+ it 'updates all issues' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ updatable_issues.each(&:reload)
+ end.to change { updatable_issues.flat_map(&:assignee_ids) }.from([]).to([current_user.id] * 3)
+
+ expect(mutation_response).to include(
+ 'updatedIssueCount' => updatable_issues.count
+ )
+ end
+
+ context 'when current user cannot read the specified group' do
+ let(:parent) { create(:group, :private) }
+
+ it 'returns a resource not found error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including(
+ 'message' => "The resource that you are attempting to access does not exist or you don't have " \
+ 'permission to perform this action'
+ )
+ )
+ end
+ end
+ end
+
+ context 'when removing all assignees' do
+ let(:additional_arguments) { { assignee_ids: [] } }
+
+ before do
+ updatable_issues.each { |issue| issue.update!(assignees: [current_user]) }
+ end
+
+ it 'updates all issues' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ updatable_issues.each(&:reload)
+ end.to change { updatable_issues.flat_map(&:assignee_ids) }.from([current_user.id] * 3).to([])
+
+ expect(mutation_response).to include(
+ 'updatedIssueCount' => updatable_issues.count
+ )
+ end
+ end
+ end
+
+ context 'when update service returns an error' do
+ before do
+ allow_next_instance_of(Issuable::BulkUpdateService) do |update_service|
+ allow(update_service).to receive(:execute).and_return(
+ ServiceResponse.error(message: 'update error', http_status: 422) # rubocop:disable Gitlab/ServiceResponse
+ )
+ end
+ end
+
+ it 'returns an error message' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_data.dig('issuesBulkUpdate', 'errors')).to contain_exactly('update error')
+ end
+ end
+
+ context 'when trying to update more than the max allowed' do
+ before do
+ stub_const('Mutations::Issues::BulkUpdate::MAX_ISSUES', updatable_issues.count - 1)
+ end
+
+ it "restricts updating more than #{Mutations::Issues::BulkUpdate::MAX_ISSUES} issues at the same time" do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including(
+ 'message' =>
+ format(_('No more than %{max_issues} issues can be updated at the same time'), max_issues: max_issues)
+ )
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/releases_spec.rb b/spec/requests/api/graphql/project/releases_spec.rb
index aa454349fcf..bc47f5a0248 100644
--- a/spec/requests/api/graphql/project/releases_spec.rb
+++ b/spec/requests/api/graphql/project/releases_spec.rb
@@ -5,226 +5,10 @@ require 'spec_helper'
RSpec.describe 'Query.project(fullPath).releases()', feature_category: :release_orchestration do
include GraphqlHelpers
- let_it_be(:stranger) { create(:user) }
- let_it_be(:guest) { create(:user) }
- let_it_be(:reporter) { create(:user) }
- let_it_be(:developer) { create(:user) }
-
- let(:base_url_params) { { scope: 'all', release_tag: release.tag } }
- let(:opened_url_params) { { state: 'opened', **base_url_params } }
- let(:merged_url_params) { { state: 'merged', **base_url_params } }
- let(:closed_url_params) { { state: 'closed', **base_url_params } }
-
- let(:query) do
- graphql_query_for(:project, { fullPath: project.full_path },
- %{
- releases {
- count
- nodes {
- tagName
- tagPath
- name
- commit {
- sha
- }
- assets {
- count
- sources {
- nodes {
- url
- }
- }
- }
- evidences {
- nodes {
- sha
- }
- }
- links {
- selfUrl
- openedMergeRequestsUrl
- mergedMergeRequestsUrl
- closedMergeRequestsUrl
- openedIssuesUrl
- closedIssuesUrl
- }
- }
- }
- })
- end
-
- let(:params_for_issues_and_mrs) { { scope: 'all', state: 'opened', release_tag: release.tag } }
- let(:post_query) { post_graphql(query, current_user: current_user) }
-
- let(:data) { graphql_data.dig('project', 'releases', 'nodes', 0) }
-
- before do
- stub_default_url_options(host: 'www.example.com')
- end
-
- shared_examples 'correct total count' do
- let(:data) { graphql_data.dig('project', 'releases') }
-
- before do
- create_list(:release, 2, project: project)
-
- post_query
- end
-
- it 'returns the total count' do
- expect(data['count']).to eq(project.releases.count)
- end
- end
-
- shared_examples 'full access to all repository-related fields' do
- describe 'repository-related fields' do
- before do
- post_query
- end
-
- it 'returns data for fields that are protected in private projects' do
- expected_sources = release.sources.map do |s|
- { 'url' => s.url }
- end
-
- expected_evidences = release.evidences.map do |e|
- { 'sha' => e.sha }
- end
-
- expect(data).to eq(
- 'tagName' => release.tag,
- 'tagPath' => project_tag_path(project, release.tag),
- 'name' => release.name,
- 'commit' => {
- 'sha' => release.commit.sha
- },
- 'assets' => {
- 'count' => release.assets_count,
- 'sources' => {
- 'nodes' => expected_sources
- }
- },
- 'evidences' => {
- 'nodes' => expected_evidences
- },
- 'links' => {
- 'selfUrl' => project_release_url(project, release),
- 'openedMergeRequestsUrl' => project_merge_requests_url(project, opened_url_params),
- 'mergedMergeRequestsUrl' => project_merge_requests_url(project, merged_url_params),
- 'closedMergeRequestsUrl' => project_merge_requests_url(project, closed_url_params),
- 'openedIssuesUrl' => project_issues_url(project, opened_url_params),
- 'closedIssuesUrl' => project_issues_url(project, closed_url_params)
- }
- )
- end
- end
-
- it_behaves_like 'correct total count'
- end
-
- shared_examples 'no access to any repository-related fields' do
- describe 'repository-related fields' do
- before do
- post_query
- end
+ include_context 'when releases and group releases shared context'
- it 'does not return data for fields that expose repository information' do
- tag_name = release.tag
- release_name = release.name
- expect(data).to eq(
- 'tagName' => tag_name,
- 'tagPath' => nil,
- 'name' => release_name,
- 'commit' => nil,
- 'assets' => {
- 'count' => release.assets_count(except: [:sources]),
- 'sources' => {
- 'nodes' => []
- }
- },
- 'evidences' => {
- 'nodes' => []
- },
- 'links' => {
- 'closedIssuesUrl' => nil,
- 'closedMergeRequestsUrl' => nil,
- 'mergedMergeRequestsUrl' => nil,
- 'openedIssuesUrl' => nil,
- 'openedMergeRequestsUrl' => nil,
- 'selfUrl' => project_release_url(project, release)
- }
- )
- end
- end
-
- it_behaves_like 'correct total count'
- end
-
- # editUrl is tested separately because its permissions
- # are slightly different than other release fields
- shared_examples 'access to editUrl' do
- let(:query) do
- graphql_query_for(:project, { fullPath: project.full_path },
- %{
- releases {
- nodes {
- links {
- editUrl
- }
- }
- }
- })
- end
-
- before do
- post_query
- end
-
- it 'returns editUrl' do
- expect(data).to eq(
- 'links' => {
- 'editUrl' => edit_project_release_url(project, release)
- }
- )
- end
- end
-
- shared_examples 'no access to editUrl' do
- let(:query) do
- graphql_query_for(:project, { fullPath: project.full_path },
- %{
- releases {
- nodes {
- links {
- editUrl
- }
- }
- }
- })
- end
-
- before do
- post_query
- end
-
- it 'does not return editUrl' do
- expect(data).to eq(
- 'links' => {
- 'editUrl' => nil
- }
- )
- end
- end
-
- shared_examples 'no access to any release data' do
- before do
- post_query
- end
-
- it 'returns nil' do
- expect(data).to eq(nil)
- end
- end
+ let(:resource_type) { :project }
+ let(:resource) { project }
describe "ensures that the correct data is returned based on the project's visibility and the user's access level" do
context 'when the project is private' do
@@ -312,7 +96,7 @@ RSpec.describe 'Query.project(fullPath).releases()', feature_category: :release_
def pagination_query(params)
graphql_query_for(
- :project,
+ resource_type,
{ full_path: sort_project.full_path },
query_graphql_field(:releases, params, "#{page_info} nodes { tagName }")
)
diff --git a/spec/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction_spec.rb b/spec/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction_spec.rb
new file mode 100644
index 00000000000..3f5845f8f58
--- /dev/null
+++ b/spec/rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/migration/prevent_single_statement_with_disable_ddl_transaction'
+
+RSpec.describe RuboCop::Cop::Migration::PreventSingleStatementWithDisableDdlTransaction do
+ context 'when in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ it 'registers an offense when `disable_ddl_transaction!` is only for the :validate_foreign_key statement' do
+ code = <<~RUBY
+ class SomeMigration < Gitlab::Database::Migration[2.1]
+ disable_ddl_transaction!
+ def up
+ validate_foreign_key :emails, :user_id
+ end
+ def down
+ # no-op
+ end
+ end
+ RUBY
+
+ expect_offense(<<~RUBY, node: code, msg: described_class::MSG)
+ class SomeMigration < Gitlab::Database::Migration[2.1]
+ disable_ddl_transaction!
+ ^^^^^^^^^^^^^^^^^^^^^^^^ %{msg}
+ def up
+ validate_foreign_key :emails, :user_id
+ end
+ def down
+ # no-op
+ end
+ end
+ RUBY
+ end
+
+ it 'registers no offense when `disable_ddl_transaction!` is used with more than one statement' do
+ expect_no_offenses(<<~RUBY)
+ class SomeMigration < Gitlab::Database::Migration[2.1]
+ disable_ddl_transaction!
+ def up
+ add_concurrent_foreign_key :emails, :users, column: :user_id, on_delete: :cascade, validate: false
+ validate_foreign_key :emails, :user_id
+ end
+ def down
+ remove_foreign_key_if_exists :emails, column: :user_id
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'when outside of migration' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class SomeMigration
+ disable_ddl_transaction!
+ def up
+ validate_foreign_key :deployments, :environment_id
+ end
+ end
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb b/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb
index 0514197d6c4..e60cbe2d3ca 100644
--- a/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb
+++ b/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb
@@ -61,6 +61,7 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t
context 'with custom categories' do
it_behaves_like 'feature category validation', 'tooling'
+ it_behaves_like 'feature category validation', 'shared'
end
it 'flags invalid feature category for non-symbols' do
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index dc72cf04776..7ba349ceeae 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -117,11 +117,37 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
+ shared_examples 'bulk update service' do
+ it 'only includes authorized issuables in the result count' do
+ all_issues = issues + [create(:issue, project: create(:project, :private))]
+ result = bulk_update(all_issues, { assignee_ids: [user.id] })
+
+ expect(result[:count]).to eq(issues.count)
+ end
+
+ context 'when issuable_ids are passed as an array' do
+ it 'updates assignees' do
+ expect do
+ described_class.new(
+ parent,
+ user,
+ { issuable_ids: issues.map(&:id), assignee_ids: [user.id] }
+ ).execute('issue')
+
+ issues.each(&:reset)
+ end.to change { issues.flat_map(&:assignee_ids) }.from([]).to([user.id] * 2)
+ end
+ end
+ end
+
context 'with issuables at a project level' do
+ let_it_be_with_reload(:issues) { create_list(:issue, 2, project: project) }
+
let(:parent) { project }
+ it_behaves_like 'bulk update service'
+
context 'with unpermitted attributes' do
- let(:issues) { create_list(:issue, 2, project: project) }
let(:label) { create(:label, project: project) }
it 'does not update the issues' do
@@ -131,9 +157,23 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
- describe 'close issues' do
- let(:issues) { create_list(:issue, 2, project: project) }
+ context 'when issuable update service raises an ArgumentError' do
+ before do
+ allow_next_instance_of(Issues::UpdateService) do |update_service|
+ allow(update_service).to receive(:execute).and_raise(ArgumentError, 'update error')
+ end
+ end
+
+ it 'returns an error response' do
+ result = bulk_update(issues, add_label_ids: [])
+ expect(result).to be_error
+ expect(result.errors).to contain_exactly('update error')
+ expect(result.http_status).to eq(422)
+ end
+ end
+
+ describe 'close issues' do
it 'succeeds and returns the correct number of issues updated' do
result = bulk_update(issues, state_event: 'close')
@@ -155,24 +195,24 @@ RSpec.describe Issuable::BulkUpdateService do
end
describe 'reopen issues' do
- let(:issues) { create_list(:closed_issue, 2, project: project) }
+ let_it_be_with_reload(:closed_issues) { create_list(:closed_issue, 2, project: project) }
it 'succeeds and returns the correct number of issues updated' do
- result = bulk_update(issues, state_event: 'reopen')
+ result = bulk_update(closed_issues, state_event: 'reopen')
expect(result.success?).to be_truthy
- expect(result.payload[:count]).to eq(issues.count)
+ expect(result.payload[:count]).to eq(closed_issues.count)
end
it 'reopens all the issues passed' do
- bulk_update(issues, state_event: 'reopen')
+ bulk_update(closed_issues, state_event: 'reopen')
expect(project.issues.closed).to be_empty
expect(project.issues.opened).not_to be_empty
end
it_behaves_like 'scheduling cached group count clear' do
- let(:issuables) { issues }
+ let(:issuables) { closed_issues }
let(:params) { { state_event: 'reopen' } }
end
end
@@ -207,10 +247,10 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
- context 'when the new assignee ID is not present' do
- it 'does not unassign' do
+ context 'when the new assignee IDs array is empty' do
+ it 'removes all assignees' do
expect { bulk_update(merge_request, assignee_ids: []) }
- .not_to change { merge_request.reload.assignee_ids }
+ .to change(merge_request.assignees, :count).by(-1)
end
end
end
@@ -244,10 +284,10 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
- context 'when the new assignee ID is not present' do
- it 'does not unassign' do
+ context 'when the new assignee IDs array is empty' do
+ it 'removes all assignees' do
expect { bulk_update(issue, assignee_ids: []) }
- .not_to change(issue.assignees, :count)
+ .to change(issue.assignees, :count).by(-1)
end
end
end
@@ -321,6 +361,10 @@ RSpec.describe Issuable::BulkUpdateService do
group.add_reporter(user)
end
+ it_behaves_like 'bulk update service' do
+ let_it_be_with_reload(:issues) { create_list(:issue, 2, project: create(:project, group: group)) }
+ end
+
describe 'updating milestones' do
let(:milestone) { create(:milestone, group: group) }
let(:project) { create(:project, :repository, group: group) }
@@ -372,4 +416,13 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
end
+
+ context 'when no parent is provided' do
+ it 'returns an unscoped update error' do
+ result = described_class.new(nil, user, { assignee_ids: [user.id], issuable_ids: [] }).execute('issue')
+
+ expect(result).to be_error
+ expect(result.errors).to contain_exactly(_('A parent must be provided when bulk updating issuables'))
+ end
+ end
end
diff --git a/spec/services/members/approve_access_request_service_spec.rb b/spec/services/members/approve_access_request_service_spec.rb
index d26bab7bb0a..ca5c052d032 100644
--- a/spec/services/members/approve_access_request_service_spec.rb
+++ b/spec/services/members/approve_access_request_service_spec.rb
@@ -30,6 +30,14 @@ RSpec.describe Members::ApproveAccessRequestService do
expect(member.requested_at).to be_nil
end
+ it 'calls the method to resolve access request todos for the approver' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:resolve_access_request_todos).with(current_user, access_requester)
+ end
+
+ described_class.new(current_user, params).execute(access_requester, **opts)
+ end
+
context 'with a custom access level' do
let(:params) { { access_level: custom_access_level } }
diff --git a/spec/services/members/base_service_spec.rb b/spec/services/members/base_service_spec.rb
new file mode 100644
index 00000000000..b2db599db9c
--- /dev/null
+++ b/spec/services/members/base_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::BaseService, feature_category: :projects do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:access_requester) { create(:group_member) }
+
+ describe '#resolve_access_request_todos' do
+ it 'calls resolve_access_request_todos on the todo service' do
+ expect_next_instance_of(TodoService) do |todo_service|
+ expect(todo_service)
+ .to receive(:resolve_access_request_todos).with(current_user, access_requester)
+ end
+
+ described_class.new.send(:resolve_access_request_todos, current_user, access_requester)
+ end
+ end
+end
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 9570aa1b612..2b956bec469 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -41,6 +41,14 @@ RSpec.describe Members::DestroyService, feature_category: :subgroups do
.not_to change { member_user.notification_settings.count }
end
end
+
+ it 'resolves the access request todos for the owner' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:resolve_access_request_todos).with(current_user, member)
+ end
+
+ described_class.new(current_user).execute(member, **opts)
+ end
end
shared_examples 'a service destroying a member with access' do
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index fe1ab6b1d58..d1bc10cfd28 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -192,4 +192,21 @@ RSpec.describe PreviewMarkdownService do
"Sets time estimate to 2y.<br>Assigns #{user.to_reference}."
end
end
+
+ context 'when the target is a work item' do
+ let(:work_item) { create(:work_item, :task, project: project) }
+ let(:params) do
+ {
+ text: "/title new title",
+ target_type: 'WorkItem',
+ target_id: work_item.iid
+ }
+ end
+
+ let(:result) { described_class.new(project, user, params).execute }
+
+ it 'renders the quick action preview' do
+ expect(result[:commands]).to eq "Changes the title to \"new title\"."
+ end
+ end
end
diff --git a/spec/services/quick_actions/target_service_spec.rb b/spec/services/quick_actions/target_service_spec.rb
index 4dd21612d9e..1b0a5d4ae73 100644
--- a/spec/services/quick_actions/target_service_spec.rb
+++ b/spec/services/quick_actions/target_service_spec.rb
@@ -47,6 +47,14 @@ RSpec.describe QuickActions::TargetService do
it_behaves_like 'build target', type_iid: -1
end
+ context 'for work item' do
+ let(:target) { create(:work_item, :task, project: project) }
+ let(:target_iid) { target.iid }
+ let(:type) { 'WorkItem' }
+
+ it_behaves_like 'find target'
+ end
+
context 'for merge request' do
let(:target) { create(:merge_request, source_project: project) }
let(:target_iid) { target.iid }
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 596ca9495ff..f73eae70d3c 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -1224,6 +1224,24 @@ RSpec.describe TodoService do
end
end
+ describe '#resolve_access_request_todos' do
+ let_it_be(:source) { create(:group, :public) }
+ let_it_be(:requester) { create(:group_member, :access_request, group: source, user: assignee) }
+
+ it 'marks the todos for the request handler as done' do
+ request_handler_todo = create(:todo,
+ user: member,
+ state: :pending,
+ action: Todo::MEMBER_ACCESS_REQUESTED,
+ author: requester.user,
+ target: source)
+
+ service.resolve_access_request_todos(member, requester)
+
+ expect(request_handler_todo.reload).to be_done
+ end
+ end
+
describe '#restore_todo' do
let!(:todo) { create(:todo, :done, user: john_doe) }
diff --git a/spec/support/shared_contexts/requests/api/graphql/releases_and_group_releases_shared_context.rb b/spec/support/shared_contexts/requests/api/graphql/releases_and_group_releases_shared_context.rb
new file mode 100644
index 00000000000..81076ea6fdc
--- /dev/null
+++ b/spec/support/shared_contexts/requests/api/graphql/releases_and_group_releases_shared_context.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
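+# Expects the including spec to define `resource_type` (:project or :group), the matching `resource`,
+# and `current_user`.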
+RSpec.shared_context 'when releases and group releases shared context' do
+ let_it_be(:stranger) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ let(:base_url_params) { { scope: 'all', release_tag: release.tag } }
+ let(:opened_url_params) { { state: 'opened', **base_url_params } }
+ let(:merged_url_params) { { state: 'merged', **base_url_params } }
+ let(:closed_url_params) { { state: 'closed', **base_url_params } }
+
+ let(:query) do
+ graphql_query_for(resource_type, { fullPath: resource.full_path },
+ %(
+ releases {
+ count
+ nodes {
+ tagName
+ tagPath
+ name
+ commit {
+ sha
+ }
+ assets {
+ count
+ sources {
+ nodes {
+ url
+ }
+ }
+ }
+ evidences {
+ nodes {
+ sha
+ }
+ }
+ links {
+ selfUrl
+ openedMergeRequestsUrl
+ mergedMergeRequestsUrl
+ closedMergeRequestsUrl
+ openedIssuesUrl
+ closedIssuesUrl
+ }
+ }
+ }
+ ))
+ end
+
+ let(:params_for_issues_and_mrs) { { scope: 'all', state: 'opened', release_tag: release.tag } }
+ let(:post_query) { post_graphql(query, current_user: current_user) }
+
+ let(:data) { graphql_data.dig(resource_type.to_s, 'releases', 'nodes', 0) }
+
+ before do
+ stub_default_url_options(host: 'www.example.com')
+ end
+end
diff --git a/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb b/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb
new file mode 100644
index 00000000000..0e09a9d9e66
--- /dev/null
+++ b/spec/support/shared_examples/graphql/resolvers/releases_resolvers_shared_examples.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
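+# Expects the including spec to define `resolve_releases`, `all_releases`, `public_user`, and `developer`.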
+RSpec.shared_examples 'releases and group releases resolver' do
+ context 'when the user does not have access to the project' do
+ let(:current_user) { public_user }
+
+ it 'returns an empty array' do
+ expect(resolve_releases).to be_empty
+ end
+ end
+
+ context "when the user has full access to the project's releases" do
+ let(:current_user) { developer }
+
+ it 'returns all releases associated with the project' do
+ expect(resolve_releases).to match_array(all_releases)
+ end
+
+ describe 'when order_by is released_at' do
+ context 'with sort: desc' do
+ let(:args) { { sort: :released_at_desc } }
+
+ it 'returns the releases ordered by released_at in descending order' do
+ expect(resolve_releases.to_a)
+ .to match_array(all_releases)
+ .and be_sorted(:released_at, :desc)
+ end
+ end
+
+ context 'with sort: asc' do
+ let(:args) { { sort: :released_at_asc } }
+
+ it 'returns the releases ordered by released_at in ascending order' do
+ expect(resolve_releases.to_a)
+ .to match_array(all_releases)
+ .and be_sorted(:released_at, :asc)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/namespaces/traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb
index 73e22b97abc..600539f7d0a 100644
--- a/spec/support/shared_examples/namespaces/traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_examples.rb
@@ -57,11 +57,6 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#ancestors' do
- before do
- # #reload is called to make sure traversal_ids are reloaded
- reload_models(group, nested_group, deep_nested_group, very_deep_nested_group)
- end
-
it 'returns the correct ancestors' do
expect(very_deep_nested_group.ancestors).to contain_exactly(group, nested_group, deep_nested_group)
expect(deep_nested_group.ancestors).to contain_exactly(group, nested_group)
diff --git a/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb
new file mode 100644
index 00000000000..b40cf6daea9
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/releases_and_group_releases_shared_examples.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
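+# These shared examples rely on `resource_type`, `project`, `release`, `data`, and `post_query`
+# being defined by the including spec or shared context.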
+RSpec.shared_examples 'correct total count' do
+ let(:data) { graphql_data.dig(resource_type.to_s, 'releases') }
+
+ before do
+ create_list(:release, 2, project: project)
+
+ post_query
+ end
+
+ it 'returns the total count' do
+ expect(data['count']).to eq(project.releases.count)
+ end
+end
+
+RSpec.shared_examples 'full access to all repository-related fields' do
+ describe 'repository-related fields' do
+ before do
+ post_query
+ end
+
+ it 'returns data for fields that are protected in private projects' do
+ expected_sources = release.sources.map do |s|
+ { 'url' => s.url }
+ end
+
+ expected_evidences = release.evidences.map do |e|
+ { 'sha' => e.sha }
+ end
+ expect(data).to eq(
+ 'tagName' => release.tag,
+ 'tagPath' => project_tag_path(project, release.tag),
+ 'name' => release.name,
+ 'commit' => {
+ 'sha' => release.commit.sha
+ },
+ 'assets' => {
+ 'count' => release.assets_count,
+ 'sources' => {
+ 'nodes' => expected_sources
+ }
+ },
+ 'evidences' => {
+ 'nodes' => expected_evidences
+ },
+ 'links' => {
+ 'selfUrl' => project_release_url(project, release),
+ 'openedMergeRequestsUrl' => project_merge_requests_url(project, opened_url_params),
+ 'mergedMergeRequestsUrl' => project_merge_requests_url(project, merged_url_params),
+ 'closedMergeRequestsUrl' => project_merge_requests_url(project, closed_url_params),
+ 'openedIssuesUrl' => project_issues_url(project, opened_url_params),
+ 'closedIssuesUrl' => project_issues_url(project, closed_url_params)
+ }
+ )
+ end
+ end
+
+ it_behaves_like 'correct total count'
+end
+
+RSpec.shared_examples 'no access to any repository-related fields' do
+ describe 'repository-related fields' do
+ before do
+ post_query
+ end
+
+ it 'does not return data for fields that expose repository information' do
+ tag_name = release.tag
+ release_name = release.name
+ expect(data).to eq(
+ 'tagName' => tag_name,
+ 'tagPath' => nil,
+ 'name' => release_name,
+ 'commit' => nil,
+ 'assets' => {
+ 'count' => release.assets_count(except: [:sources]),
+ 'sources' => {
+ 'nodes' => []
+ }
+ },
+ 'evidences' => {
+ 'nodes' => []
+ },
+ 'links' => {
+ 'closedIssuesUrl' => nil,
+ 'closedMergeRequestsUrl' => nil,
+ 'mergedMergeRequestsUrl' => nil,
+ 'openedIssuesUrl' => nil,
+ 'openedMergeRequestsUrl' => nil,
+ 'selfUrl' => project_release_url(project, release)
+ }
+ )
+ end
+ end
+
+ it_behaves_like 'correct total count'
+end
+
+RSpec.shared_examples 'access to editUrl' do
+ # editUrl is tested separately because its permissions
+ # are slightly different from those of other release fields
+ let(:query) do
+ graphql_query_for(resource_type, { fullPath: resource.full_path },
+ %(
+ releases {
+ nodes {
+ links {
+ editUrl
+ }
+ }
+ }
+ ))
+ end
+
+ before do
+ post_query
+ end
+
+ it 'returns editUrl' do
+ expect(data).to eq(
+ 'links' => {
+ 'editUrl' => edit_project_release_url(project, release)
+ }
+ )
+ end
+end
+
+RSpec.shared_examples 'no access to editUrl' do
+ let(:query) do
+ graphql_query_for(resource_type, { fullPath: resource.full_path },
+ %(
+ releases {
+ nodes {
+ links {
+ editUrl
+ }
+ }
+ }
+ ))
+ end
+
+ before do
+ post_query
+ end
+
+ it 'does not return editUrl' do
+ expect(data).to eq(
+ 'links' => {
+ 'editUrl' => nil
+ }
+ )
+ end
+end
+
+RSpec.shared_examples 'no access to any release data' do
+ before do
+ post_query
+ end
+
+ it 'returns nil' do
+ expect(data).to eq(nil)
+ end
+end
diff --git a/tooling/bin/js_to_system_specs_mappings b/tooling/bin/js_to_system_specs_mappings
index 59a66ab0691..3e9d9cb4c5f 100755
--- a/tooling/bin/js_to_system_specs_mappings
+++ b/tooling/bin/js_to_system_specs_mappings
@@ -3,8 +3,12 @@
require_relative '../lib/tooling/mappings/js_to_system_specs_mappings'
-changes = ARGV.shift
-output_file = ARGV.shift
+changes = ARGV.shift
+matching_tests = ARGV.shift
+
changed_files = File.read(changes).split(' ')
+matching_test_files = File.read(matching_tests).split(' ')
+
+system_tests = Tooling::Mappings::JsToSystemSpecsMappings.new.execute(changed_files)
-File.write(output_file, Tooling::Mappings::JsToSystemSpecsMappings.new.execute(changed_files).join(' '))
+File.write(matching_tests, (matching_test_files + system_tests).join(' '))