31 files changed, 307 insertions, 53 deletions
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml index 6e63874eca8..53420dfe31a 100644 --- a/.gitlab/ci/rules.gitlab-ci.yml +++ b/.gitlab/ci/rules.gitlab-ci.yml @@ -19,12 +19,6 @@ .if-default-branch-push: &if-default-branch-push if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "push"' -.if-default-branch-schedule-2-hourly: &if-default-branch-schedule-2-hourly - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "2-hourly"' - -.if-default-branch-schedule-nightly: &if-default-branch-schedule-nightly - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "nightly"' - .if-auto-deploy-branches: &if-auto-deploy-branches if: '$CI_COMMIT_BRANCH =~ /^\d+-\d+-auto-deploy-\d+$/' @@ -55,12 +49,30 @@ .if-security-merge-request: &if-security-merge-request if: '$CI_PROJECT_NAMESPACE == "gitlab-org/security" && $CI_MERGE_REQUEST_IID' +.if-default-branch-schedule-2-hourly: &if-default-branch-schedule-2-hourly + if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "2-hourly"' + +.if-default-branch-schedule-nightly: &if-default-branch-schedule-nightly + if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "nightly"' + .if-security-schedule: &if-security-schedule if: '$CI_PROJECT_NAMESPACE == "gitlab-org/security" && $CI_PIPELINE_SOURCE == "schedule"' .if-dot-com-gitlab-org-schedule: &if-dot-com-gitlab-org-schedule if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_PIPELINE_SOURCE == "schedule"' +.if-dot-com-ee-schedule: &if-dot-com-ee-schedule + if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_PIPELINE_SOURCE == "schedule"' + +.if-dot-com-ee-2-hourly-schedule: &if-dot-com-ee-2-hourly-schedule + if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "2-hourly"' + +.if-dot-com-ee-nightly-schedule: &if-dot-com-ee-nightly-schedule + if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "nightly"' + +.if-cache-credentials-schedule: &if-cache-credentials-schedule + if: '$CI_REPO_CACHE_CREDENTIALS && $CI_PIPELINE_SOURCE == "schedule"' + .if-dot-com-gitlab-org-default-branch: &if-dot-com-gitlab-org-default-branch if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org" && $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH' @@ -73,11 +85,6 @@ .if-dot-com-gitlab-org-and-security-tag: &if-dot-com-gitlab-org-and-security-tag if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE =~ /^gitlab-org($|\/security$)/ && $CI_COMMIT_TAG' -.if-dot-com-ee-2-hourly-schedule: &if-dot-com-ee-2-hourly-schedule - if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_PIPELINE_SOURCE == "schedule" && $FREQUENCY == "2-hourly"' - -.if-cache-credentials-schedule: &if-cache-credentials-schedule - if: '$CI_REPO_CACHE_CREDENTIALS && $CI_PIPELINE_SOURCE == "schedule"' .if-rspec-fail-fast-disabled: &if-rspec-fail-fast-disabled if: '$RSPEC_FAIL_FAST_ENABLED != "true"' @@ -1306,6 +1313,8 @@ - <<: *if-dot-com-gitlab-org-merge-request changes: *code-qa-patterns allow_failure: true + - <<: *if-dot-com-ee-schedule + allow_failure: true # The rule needs to be duplicated between `on_success` and `on_failure` # because the jobs `needs` the 
previous job to complete. @@ -1332,6 +1341,10 @@ - <<: *if-dot-com-gitlab-org-merge-request changes: *code-qa-patterns when: on_failure + - <<: *if-dot-com-ee-schedule + when: on_success + - <<: *if-dot-com-ee-schedule + when: on_failure .review:rules:review-qa-all: rules: @@ -1344,6 +1357,8 @@ - <<: *if-dot-com-gitlab-org-merge-request changes: *qa-patterns allow_failure: true + - <<: *if-dot-com-ee-nightly-schedule + allow_failure: true # The rule needs to be duplicated between `on_success` and `on_failure` # because the jobs `needs` the previous job to complete. @@ -1358,12 +1373,18 @@ allow_failure: true - <<: *if-dot-com-gitlab-org-merge-request changes: *qa-patterns - allow_failure: true when: on_success + allow_failure: true - <<: *if-dot-com-gitlab-org-merge-request changes: *qa-patterns + when: on_failure + allow_failure: true + - <<: *if-dot-com-ee-nightly-schedule + when: on_success allow_failure: true + - <<: *if-dot-com-ee-nightly-schedule when: on_failure + allow_failure: true .review:rules:review-cleanup: rules: diff --git a/.gitlab/ci/test-metadata.gitlab-ci.yml b/.gitlab/ci/test-metadata.gitlab-ci.yml index 08c5a7267c2..135bf8b6a8c 100644 --- a/.gitlab/ci/test-metadata.gitlab-ci.yml +++ b/.gitlab/ci/test-metadata.gitlab-ci.yml @@ -43,5 +43,6 @@ update-tests-metadata: script: - run_timed_command "retry gem install fog-aws mime-types activesupport rspec_profiling postgres-copy --no-document" - source ./scripts/rspec_helpers.sh + - test -f rspec_flaky/report-suite.json || echo -e "\e[31m" 'Consider add ~"pipeline:run-all-rspec" to run full rspec jobs' "\e[0m" - update_tests_metadata - update_tests_mapping diff --git a/GITLAB_PAGES_VERSION b/GITLAB_PAGES_VERSION index 7d47e599800..a50908ca3da 100644 --- a/GITLAB_PAGES_VERSION +++ b/GITLAB_PAGES_VERSION @@ -1 +1 @@ -1.41.0 +1.42.0 diff --git a/app/assets/javascripts/boards/components/board_filtered_search.vue b/app/assets/javascripts/boards/components/board_filtered_search.vue index 20f5207bd47..7f242dea644 100644 --- a/app/assets/javascripts/boards/components/board_filtered_search.vue +++ b/app/assets/javascripts/boards/components/board_filtered_search.vue @@ -34,6 +34,7 @@ export default { search, milestoneTitle, types, + weight, } = this.filterParams; let notParams = {}; @@ -45,6 +46,7 @@ export default { 'not[assignee_username]': this.filterParams.not.assigneeUsername, 'not[types]': this.filterParams.not.types, 'not[milestone_title]': this.filterParams.not.milestoneTitle, + 'not[weight]': this.filterParams.not.weight, }, undefined, ); @@ -58,6 +60,7 @@ export default { milestone_title: milestoneTitle, search, types, + weight, }; }, }, @@ -82,6 +85,7 @@ export default { search, milestoneTitle, types, + weight, } = this.filterParams; const filteredSearchValue = []; @@ -122,6 +126,13 @@ export default { }); } + if (weight) { + filteredSearchValue.push({ + type: 'weight', + value: { data: weight, operator: '=' }, + }); + } + if (this.filterParams['not[authorUsername]']) { filteredSearchValue.push({ type: 'author_username', @@ -136,6 +147,13 @@ export default { }); } + if (this.filterParams['not[weight]']) { + filteredSearchValue.push({ + type: 'weight', + value: { data: this.filterParams['not[weight]'], operator: '!=' }, + }); + } + if (this.filterParams['not[assigneeUsername]']) { filteredSearchValue.push({ type: 'assignee_username', @@ -195,6 +213,9 @@ export default { case 'milestone_title': filterParams.milestoneTitle = filter.value.data; break; + case 'weight': + filterParams.weight = filter.value.data; + break; case 
'filtered-search-term': if (filter.value.data) plainText.push(filter.value.data); break; diff --git a/app/assets/javascripts/boards/components/issue_board_filtered_search.vue b/app/assets/javascripts/boards/components/issue_board_filtered_search.vue index 42ae6b04a0e..5206db05410 100644 --- a/app/assets/javascripts/boards/components/issue_board_filtered_search.vue +++ b/app/assets/javascripts/boards/components/issue_board_filtered_search.vue @@ -9,6 +9,7 @@ import { __ } from '~/locale'; import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue'; import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue'; import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue'; +import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue'; export default { types: { @@ -24,6 +25,7 @@ export default { incident: __('Incident'), issue: __('Issue'), milestone: __('Milestone'), + weight: __('Weight'), is: __('is'), isNot: __('is not'), }, @@ -50,6 +52,7 @@ export default { incident, type, milestone, + weight, } = this.$options.i18n; const { types } = this.$options; const { fetchAuthors, fetchLabels } = issueBoardFilters( @@ -121,6 +124,13 @@ export default { defaultMilestones: [], // todo: https://gitlab.com/gitlab-org/gitlab/-/issues/337044#note_640010094 fetchMilestones: this.fetchMilestones, }, + { + type: 'weight', + title: weight, + icon: 'weight', + token: WeightToken, + unique: true, + }, ]; }, }, diff --git a/app/assets/javascripts/boards/constants.js b/app/assets/javascripts/boards/constants.js index 6a5492f211c..b2e25e1ae2a 100644 --- a/app/assets/javascripts/boards/constants.js +++ b/app/assets/javascripts/boards/constants.js @@ -110,6 +110,7 @@ export const FilterFields = { 'releaseTag', 'search', 'types', + 'weight', ], }; diff --git a/app/assets/javascripts/issues_list/index.js b/app/assets/javascripts/issues_list/index.js index b4def1b6e00..dcc7ee72273 100644 --- a/app/assets/javascripts/issues_list/index.js +++ b/app/assets/javascripts/issues_list/index.js @@ -24,8 +24,7 @@ export function mountJiraIssuesListApp() { } Vue.use(VueApollo); - - const defaultClient = createDefaultClient(); + const defaultClient = createDefaultClient({}, { assumeImmutableResults: true }); const apolloProvider = new VueApollo({ defaultClient, }); diff --git a/app/controllers/projects/pipelines_controller.rb b/app/controllers/projects/pipelines_controller.rb index e983fd9c993..a411264b350 100644 --- a/app/controllers/projects/pipelines_controller.rb +++ b/app/controllers/projects/pipelines_controller.rb @@ -31,10 +31,11 @@ class Projects::PipelinesController < Projects::ApplicationController feature_category :continuous_integration, [ :charts, :show, :config_variables, :stage, :cancel, :retry, - :builds, :dag, :failures, :status, :downloadable_artifacts, + :builds, :dag, :failures, :status, :index, :create, :new, :destroy ] feature_category :code_testing, [:test_report] + feature_category :build_artifacts, [:downloadable_artifacts] def index @pipelines = Ci::PipelinesFinder diff --git a/app/models/user.rb b/app/models/user.rb index d9d1003a298..cb0f15c04cb 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -248,7 +248,6 @@ class User < ApplicationRecord message: _("%{placeholder} is not a valid color scheme") % { placeholder: '%{value}' } } before_validation :sanitize_attrs - before_validation :set_notification_email, if: :new_record? 
before_validation :set_public_email, if: :public_email_changed? before_validation :set_commit_email, if: :commit_email_changed? before_save :default_private_profile_to_false @@ -273,11 +272,6 @@ class User < ApplicationRecord update_emails_with_primary_email(previous_confirmed_at, previous_email) update_invalid_gpg_signatures - - if previous_email == notification_email - self.notification_email = email - save - end end end @@ -929,7 +923,7 @@ class User < ApplicationRecord end def notification_email_verified - return if new_record? || temp_oauth_email? + return if read_attribute(:notification_email).blank? || temp_oauth_email? errors.add(:notification_email, _("must be an email you have verified")) unless verified_emails.include?(notification_email) end @@ -970,6 +964,11 @@ class User < ApplicationRecord has_attribute?(:commit_email) && super end + def notification_email + # The notification email is the same as the primary email if undefined + super.presence || self.email + end + def private_commit_email Gitlab::PrivateCommitEmail.for_user(self) end diff --git a/app/services/git/base_hooks_service.rb b/app/services/git/base_hooks_service.rb index 1eb54e13522..aee2f685e97 100644 --- a/app/services/git/base_hooks_service.rb +++ b/app/services/git/base_hooks_service.rb @@ -25,6 +25,7 @@ module Git raise NotImplementedError, "Please implement #{self.class}##{__method__}" end + # The changeset, ordered with the newest commit last def commits raise NotImplementedError, "Please implement #{self.class}##{__method__}" end @@ -132,10 +133,10 @@ module Git end def event_push_data - # We only need the last commit for the event push, and we don't + # We only need the newest commit for the event push, and we don't # need the full deltas either. @event_push_data ||= Gitlab::DataBuilder::Push.build( - **push_data_params(commits: commits.last, with_changed_files: false) + **push_data_params(commits: limited_commits.last, with_changed_files: false) ) end diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb index 9738615c804..7a22d7ffcdf 100644 --- a/app/services/git/branch_hooks_service.rb +++ b/app/services/git/branch_hooks_service.rb @@ -21,8 +21,9 @@ module Git def commits strong_memoize(:commits) do if creating_default_branch? - # The most recent PROCESS_COMMIT_LIMIT commits in the default branch - project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT) + # The most recent PROCESS_COMMIT_LIMIT commits in the default branch. + # They are returned newest-to-oldest, but we need to present them oldest-to-newest + project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT).reverse elsif creating_branch? # Use the pushed commits that aren't reachable by the default branch # as a heuristic. This may include more commits than are actually diff --git a/db/post_migrate/20210813195518_schedule_recalculate_uuid_on_vulnerabilities_occurrences3.rb b/db/post_migrate/20210813195518_schedule_recalculate_uuid_on_vulnerabilities_occurrences3.rb new file mode 100644 index 00000000000..c59c71708ca --- /dev/null +++ b/db/post_migrate/20210813195518_schedule_recalculate_uuid_on_vulnerabilities_occurrences3.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +class ScheduleRecalculateUuidOnVulnerabilitiesOccurrences3 < ActiveRecord::Migration[6.0] + include Gitlab::Database::MigrationHelpers + + MIGRATION = 'RecalculateVulnerabilitiesOccurrencesUuid' + DELAY_INTERVAL = 2.minutes.to_i + BATCH_SIZE = 2_500 + + disable_ddl_transaction! 
+ + def up + # Make sure that RemoveDuplicateVulnerabilitiesFindings has finished running + # so that we don't run into duplicate UUID issues + Gitlab::BackgroundMigration.steal('RemoveDuplicateVulnerabilitiesFindings') + + say "Scheduling #{MIGRATION} jobs" + queue_background_migration_jobs_by_range_at_intervals( + define_batchable_model('vulnerability_occurrences'), + MIGRATION, + DELAY_INTERVAL, + batch_size: BATCH_SIZE, + track_jobs: true + ) + end + + def down + # no-op + end +end diff --git a/db/schema_migrations/20210813195518 b/db/schema_migrations/20210813195518 new file mode 100644 index 00000000000..d64dd04d05b --- /dev/null +++ b/db/schema_migrations/20210813195518 @@ -0,0 +1 @@ +848e0201709b3608e76308e9d610e2a4e48ab665c7d8b52f3d23f0a215df58ff
\ No newline at end of file diff --git a/doc/architecture/blueprints/database/scalability/patterns/read_mostly.md b/doc/architecture/blueprints/database/scalability/patterns/read_mostly.md index bd87573a88e..02b56841507 100644 --- a/doc/architecture/blueprints/database/scalability/patterns/read_mostly.md +++ b/doc/architecture/blueprints/database/scalability/patterns/read_mostly.md @@ -1,6 +1,6 @@ --- stage: Enablement -group: database +group: Database info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments comments: false description: 'Learn how to scale operating on read-mostly data at scale' diff --git a/doc/architecture/blueprints/database/scalability/patterns/time_decay.md b/doc/architecture/blueprints/database/scalability/patterns/time_decay.md index 6e0187a8d74..9309c581d54 100644 --- a/doc/architecture/blueprints/database/scalability/patterns/time_decay.md +++ b/doc/architecture/blueprints/database/scalability/patterns/time_decay.md @@ -1,6 +1,6 @@ --- stage: Enablement -group: database +group: Database info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments comments: false description: 'Learn how to operate on large time-decay data' diff --git a/doc/architecture/blueprints/database_scaling/size-limits.md b/doc/architecture/blueprints/database_scaling/size-limits.md index 107cf0bb248..d63aa3bd4e8 100644 --- a/doc/architecture/blueprints/database_scaling/size-limits.md +++ b/doc/architecture/blueprints/database_scaling/size-limits.md @@ -33,7 +33,7 @@ graph LR Large tables on GitLab.com are a major problem - for both operations and development. They cause a variety of problems: 1. **Query timings** and hence overall application performance suffers -1. **Table maintenance** becomes much more costly. Vacuum activity has become a significant concern on GitLab.com - with large tables only seeing infrequent (e.g. once per day) and vacuum runs taking many hours to complete. This has various negative consequences and a very large table has potential to impact seemingly unrelated parts of the database and hence overall application performance suffers. +1. **Table maintenance** becomes much more costly. Vacuum activity has become a significant concern on GitLab.com - with large tables only seeing infrequent (once per day) processing and vacuum runs taking many hours to complete. This has various negative consequences and a very large table has potential to impact seemingly unrelated parts of the database and hence overall application performance suffers. 1. **Data migrations** on large tables are significantly more complex to implement and incur development overhead. They have potential to cause stability problems on GitLab.com and take a long time to execute on large datasets. 1. **Indexes size** is significant. This directly impacts performance as smaller parts of the index are kept in memory and also makes the indexes harder to maintain (think repacking). 1. **Index creation times** go up significantly - in 2021, we see btree creation take up to 6 hours for a single btree index. This impacts our ability to deploy frequently and leads to vacuum-related problems (delayed cleanup). @@ -141,7 +141,7 @@ There is no standard solution to reduce table sizes - there are many! 1. 
**Partitioning**: Apply a partitioning scheme if there is a common access dimension. 1. **Normalization**: Review relational modeling and apply normalization techniques to remove duplicate data 1. **Vertical table splits**: Review column usage and split table vertically. -1. **Externalize**: Move large data types out of the database entirely. For example, JSON documents, especially when not used for filtering, may be better stored outside the database, e.g. in object storage. +1. **Externalize**: Move large data types out of the database entirely. For example, JSON documents, especially when not used for filtering, may be better stored outside the database, for example, in object storage. NOTE: While we're targeting to limit physical table sizes, we consider retaining or improving performance a goal, too. diff --git a/doc/development/snowplow/index.md b/doc/development/snowplow/index.md index 59361e5206c..527b4292b23 100644 --- a/doc/development/snowplow/index.md +++ b/doc/development/snowplow/index.md @@ -551,14 +551,14 @@ Snowplow Micro is a Docker-based solution for testing frontend and backend event update application_settings set snowplow_collector_hostname='localhost:9090', snowplow_enabled=true, snowplow_cookie_domain='.gitlab.com'; ``` -1. Update `DEFAULT_SNOWPLOW_OPTIONS` in `app/assets/javascripts/tracking/index.js` to remove `forceSecureTracker: true`: +1. Update `DEFAULT_SNOWPLOW_OPTIONS` in `app/assets/javascripts/tracking/constants.js` to remove `forceSecureTracker: true`: ```diff - diff --git a/app/assets/javascripts/tracking/index.js b/app/assets/javascripts/tracking/index.js - index 0a1211d0a76..3b98c8f28f2 100644 - --- a/app/assets/javascripts/tracking/index.js - +++ b/app/assets/javascripts/tracking/index.js - @@ -7,7 +7,6 @@ const DEFAULT_SNOWPLOW_OPTIONS = { + diff --git a/app/assets/javascripts/tracking/constants.js b/app/assets/javascripts/tracking/constants.js + index 598111e4086..eff38074d4c 100644 + --- a/app/assets/javascripts/tracking/constants.js + +++ b/app/assets/javascripts/tracking/constants.js + @@ -7,7 +7,6 @@ export const DEFAULT_SNOWPLOW_OPTIONS = { appId: '', userFingerprint: false, respectDoNotTrack: true, @@ -566,7 +566,6 @@ Snowplow Micro is a Docker-based solution for testing frontend and backend event eventMethod: 'post', contexts: { webPage: true, performanceTiming: true }, formTracking: false, - ``` 1. Update `snowplow_options` in `lib/gitlab/tracking.rb` to add `protocol` and `port`: diff --git a/doc/install/installation.md b/doc/install/installation.md index 9db8631a6a5..a0587c6ef8a 100644 --- a/doc/install/installation.md +++ b/doc/install/installation.md @@ -40,7 +40,7 @@ can't be terminated and its memory usage grows over time. ## Select a version to install -Make sure you view [this installation guide](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/install/installation.md) from the branch (version) of GitLab you would like to install (e.g., `11-7-stable`). +Make sure you view [this installation guide](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/install/installation.md) from the branch (version) of GitLab you would like to install (for example, `11-7-stable`). You can select the branch in the version dropdown in the top left corner of GitLab (below the menu bar). If the highest number stable branch is unclear, check the [GitLab blog](https://about.gitlab.com/blog/) for installation guide links by version. 
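
The `app/models/user.rb` hunk earlier in this diff drops the `set_notification_email` callback and instead overrides the `notification_email` reader with `super.presence || self.email`, so a NULL column transparently falls back to the primary email. A minimal plain-Ruby sketch of that fallback pattern (the `SketchUser` class and its hash-backed attribute store are stand-ins for the ActiveRecord machinery, not GitLab's actual model):

```ruby
# Sketch of the reader-level fallback introduced in app/models/user.rb above.
class SketchUser
  attr_reader :email

  def initialize(email:, notification_email: nil)
    @email = email
    # Stand-in for the database column that may legitimately stay NULL.
    @attributes = { 'notification_email' => notification_email }
  end

  # Mirrors `super.presence || self.email`: an explicitly stored value wins,
  # a nil or blank column falls back to the primary email.
  def notification_email
    stored = @attributes['notification_email']
    stored && !stored.strip.empty? ? stored : email
  end
end

SketchUser.new(email: 'gonzo@muppets.com').notification_email
# => "gonzo@muppets.com"  (column is nil, so the reader falls back to the primary email)
SketchUser.new(email: 'gonzo@muppets.com', notification_email: 'alerts@example.com').notification_email
# => "alerts@example.com"
```

The companion change to `notification_email_verified` (returning early when the raw column is blank) keeps the fallback value out of the verified-email check, which appears to be why the notifications controller spec now submits an unverified address rather than an empty string to exercise the failure path.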
diff --git a/doc/integration/kerberos.md b/doc/integration/kerberos.md index 5b827d23772..ba3f246f5f5 100644 --- a/doc/integration/kerberos.md +++ b/doc/integration/kerberos.md @@ -261,7 +261,7 @@ offers only `basic` authentication. kerberos: # Dedicated port: Git before 2.4 does not fall back to Basic authentication if Negotiate fails. # To support both Basic and Negotiate methods with older versions of Git, configure - # nginx to proxy GitLab on an extra port (e.g. 8443) and uncomment the following lines + # nginx to proxy GitLab on an extra port (for example: 8443) and uncomment the following lines # to dedicate this port to Kerberos authentication. (default: false) use_dedicated_port: true port: 8443 diff --git a/doc/user/project/import/github.md b/doc/user/project/import/github.md index e67b6a45280..1ab343d75fb 100644 --- a/doc/user/project/import/github.md +++ b/doc/user/project/import/github.md @@ -179,8 +179,8 @@ Sidekiq workers that process the following queues: For an optimal experience, it's recommended having at least 4 Sidekiq processes (each running a number of threads equal to the number of CPU cores) that *only* process these queues. It's also recommended that these processes run on separate -servers. For 4 servers with 8 cores this means you can import up to 32 objects (e.g., issues) in parallel. +servers. For 4 servers with 8 cores this means you can import up to 32 objects (for example, issues) in parallel. Reducing the time spent in cloning a repository can be done by increasing network throughput, CPU capacity, and disk -performance (e.g., by using high performance SSDs) of the disks that store the Git repositories (for your GitLab instance). +performance (by using high performance SSDs, for example) of the disks that store the Git repositories (for your GitLab instance). Increasing the number of Sidekiq workers will *not* reduce the time spent cloning repositories. diff --git a/doc/user/project/integrations/webhooks.md b/doc/user/project/integrations/webhooks.md index d18858f647c..44225ac2921 100644 --- a/doc/user/project/integrations/webhooks.md +++ b/doc/user/project/integrations/webhooks.md @@ -94,7 +94,7 @@ Triggered when you push to the repository except when pushing tags. NOTE: When more than 20 commits are pushed at once, the `commits` webhook -attribute only contains the first 20 for performance reasons. Loading +attribute only contains the newest 20 for performance reasons. Loading detailed commit data is expensive. Note that despite only 20 commits being present in the `commits` attribute, the `total_commits_count` attribute contains the actual total. diff --git a/doc/user/project/merge_requests/approvals/index.md b/doc/user/project/merge_requests/approvals/index.md index 06440e10f0d..47744edd5ce 100644 --- a/doc/user/project/merge_requests/approvals/index.md +++ b/doc/user/project/merge_requests/approvals/index.md @@ -116,6 +116,6 @@ important to describe those, too. Think of things that may go wrong and include This is important to minimize requests for support, and to avoid doc comments with questions that you know someone might ask. -Each scenario can be a third-level heading, e.g. `### Getting error message X`. +Each scenario can be a third-level heading, for example, `### Getting error message X`. If you have none to add when creating a doc, leave this section in place but commented out to help encourage others to add to it in the future. 
--> diff --git a/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md b/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md index 8c77714a2de..51f1ec96c22 100644 --- a/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md +++ b/doc/user/project/pages/custom_domains_ssl_tls_certification/index.md @@ -139,7 +139,7 @@ If you're using Cloudflare, check > - **Do not** use a CNAME record if you want to point your `domain.com` to your GitLab Pages site. Use an `A` record instead. > - **Do not** add any special chars after the default Pages - domain. E.g., don't point `subdomain.domain.com` to + domain. For example, don't point `subdomain.domain.com` to or `namespace.gitlab.io/`. Some domain hosting providers may request a trailing dot (`namespace.gitlab.io.`), though. > - GitLab Pages IP on GitLab.com [was changed](https://about.gitlab.com/releases/2017/03/06/we-are-changing-the-ip-of-gitlab-pages-on-gitlab-com/) in 2017. > - GitLab Pages IP on GitLab.com [has changed](https://about.gitlab.com/blog/2018/07/19/gcp-move-update/#gitlab-pages-and-custom-domains) @@ -315,6 +315,6 @@ important to describe those, too. Think of things that may go wrong and include This is important to minimize requests for support, and to avoid doc comments with questions that you know someone might ask. -Each scenario can be a third-level heading, e.g. `### Getting error message X`. +Each scenario can be a third-level heading, for example, `### Getting error message X`. If you have none to add when creating a doc, leave this section in place but commented out to help encourage others to add to it in the future. --> diff --git a/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md b/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md index f0a922ff390..ee1004a3416 100644 --- a/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md +++ b/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md @@ -99,6 +99,6 @@ important to describe those, too. Think of things that may go wrong and include This is important to minimize requests for support, and to avoid doc comments with questions that you know someone might ask. -Each scenario can be a third-level heading, e.g. `### Getting error message X`. +Each scenario can be a third-level heading, for example, `### Getting error message X`. If you have none to add when creating a doc, leave this section in place but commented out to help encourage others to add to it in the future. 
--> diff --git a/lib/api/badges.rb b/lib/api/badges.rb index 04f155be4e1..d7c850c2f40 100644 --- a/lib/api/badges.rb +++ b/lib/api/badges.rb @@ -8,7 +8,7 @@ module API helpers ::API::Helpers::BadgesHelpers - feature_category :continuous_integration + feature_category :projects helpers do def find_source_if_admin(source_type) diff --git a/spec/controllers/profiles/notifications_controller_spec.rb b/spec/controllers/profiles/notifications_controller_spec.rb index 1ebf4363ba6..36ec36fb6f1 100644 --- a/spec/controllers/profiles/notifications_controller_spec.rb +++ b/spec/controllers/profiles/notifications_controller_spec.rb @@ -162,7 +162,7 @@ RSpec.describe Profiles::NotificationsController do it 'shows an error message if the params are invalid' do sign_in(user) - put :update, params: { user: { notification_email: '' } } + put :update, params: { user: { notification_email: 'unverified@example.com' } } expect(user.reload.notification_email).to eq('original@example.com') expect(controller).to set_flash[:alert].to('Failed to save new settings') diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js index 615516b32bd..50f86e92adb 100644 --- a/spec/frontend/boards/components/board_filtered_search_spec.js +++ b/spec/frontend/boards/components/board_filtered_search_spec.js @@ -117,6 +117,7 @@ describe('BoardFilteredSearch', () => { { type: 'label_name', value: { data: 'label2', operator: '=' } }, { type: 'milestone_title', value: { data: 'New Milestone', operator: '=' } }, { type: 'types', value: { data: 'INCIDENT', operator: '=' } }, + { type: 'weight', value: { data: '2', operator: '=' } }, ]; jest.spyOn(urlUtility, 'updateHistory'); findFilteredSearch().vm.$emit('onFilter', mockFilters); @@ -125,7 +126,7 @@ describe('BoardFilteredSearch', () => { title: '', replace: true, url: - 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2&milestone_title=New+Milestone&types=INCIDENT', + 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2&milestone_title=New+Milestone&types=INCIDENT&weight=2', }); }); }); diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js index 21728ec0f59..106f7b04c4b 100644 --- a/spec/frontend/boards/mock_data.js +++ b/spec/frontend/boards/mock_data.js @@ -10,6 +10,7 @@ import { __ } from '~/locale'; import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue'; import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue'; import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue'; +import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue'; export const boardObj = { id: 1, @@ -607,4 +608,11 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones) => [ defaultMilestones: [], fetchMilestones, }, + { + icon: 'weight', + title: __('Weight'), + type: 'weight', + token: WeightToken, + unique: true, + }, ]; diff --git a/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb b/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb new file mode 100644 index 00000000000..77f298b5ecb --- /dev/null +++ b/spec/migrations/schedule_recalculate_uuid_on_vulnerabilities_occurrences3_spec.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe ScheduleRecalculateUuidOnVulnerabilitiesOccurrences3 do + let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } + let(:users) { table(:users) } + let(:user) { create_user! } + let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } + let(:scanners) { table(:vulnerability_scanners) } + let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } + let(:vulnerabilities) { table(:vulnerabilities) } + let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } + let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } + let(:vulnerability_identifier) do + vulnerability_identifiers.create!( + project_id: project.id, + external_type: 'uuid-v5', + external_id: 'uuid-v5', + fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', + name: 'Identifier for UUIDv5') + end + + let(:different_vulnerability_identifier) do + vulnerability_identifiers.create!( + project_id: project.id, + external_type: 'uuid-v4', + external_id: 'uuid-v4', + fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89', + name: 'Identifier for UUIDv4') + end + + let(:vulnerability_for_uuidv4) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let(:vulnerability_for_uuidv5) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:finding1) do + create_finding!( + vulnerability_id: vulnerability_for_uuidv4.id, + project_id: project.id, + scanner_id: different_scanner.id, + primary_identifier_id: different_vulnerability_identifier.id, + location_fingerprint: 'fa18f432f1d56675f4098d318739c3cd5b14eb3e', + uuid: 'b3cc2518-5446-4dea-871c-89d5e999c1ac' + ) + end + + let!(:finding2) do + create_finding!( + vulnerability_id: vulnerability_for_uuidv5.id, + project_id: project.id, + scanner_id: scanner.id, + primary_identifier_id: vulnerability_identifier.id, + location_fingerprint: '838574be0210968bf6b9f569df9c2576242cbf0a', + uuid: '77211ed6-7dff-5f6b-8c9a-da89ad0a9b60' + ) + end + + before do + stub_const("#{described_class}::BATCH_SIZE", 1) + end + + around do |example| + freeze_time { Sidekiq::Testing.fake! { example.run } } + end + + it 'schedules background migrations', :aggregate_failures do + migrate! 
+ + expect(BackgroundMigrationWorker.jobs.size).to eq(2) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, finding1.id, finding1.id) + expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, finding2.id, finding2.id) + end + + private + + def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) + vulnerabilities.create!( + project_id: project_id, + author_id: author_id, + title: title, + severity: severity, + confidence: confidence, + report_type: report_type + ) + end + + def create_finding!( + vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, location_fingerprint:, uuid:) + vulnerabilities_findings.create!( + vulnerability_id: vulnerability_id, + project_id: project_id, + name: 'test', + severity: 7, + confidence: 7, + report_type: 0, + project_fingerprint: '123qweasdzxc', + scanner_id: scanner_id, + primary_identifier_id: primary_identifier_id, + location_fingerprint: location_fingerprint, + metadata_version: 'test', + raw_metadata: 'test', + uuid: uuid + ) + end + + def create_user!(name: "Example User", email: "user@example.com", user_type: nil) + users.create!( + name: name, + email: email, + username: name, + projects_limit: 0 + ) + end +end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 48e56051fc5..d73bc95a2f2 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -3125,6 +3125,19 @@ RSpec.describe User do end end + describe '#notification_email' do + let(:email) { 'gonzo@muppets.com' } + + context 'when the column in the database is null' do + subject { create(:user, email: email, notification_email: nil) } + + it 'defaults to the primary email' do + expect(subject.read_attribute(:notification_email)).to be nil + expect(subject.notification_email).to eq(email) + end + end + end + describe '.find_by_private_commit_email' do context 'with email' do let_it_be(:user) { create(:user) } diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb index 6e9404166f3..a93f594b360 100644 --- a/spec/services/git/branch_hooks_service_spec.rb +++ b/spec/services/git/branch_hooks_service_spec.rb @@ -92,7 +92,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do end describe 'Push Event' do - let(:event) { Event.pushed_action.first } + let(:event) { Event.pushed_action.take } subject(:execute_service) { service.execute } @@ -171,7 +171,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do end end - context "with a new branch" do + context "with a new default branch" do let(:oldrev) { Gitlab::Git::BLANK_SHA } it 'generates a push event with more than one commit' do @@ -183,12 +183,32 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) expect(event.push_event_payload.commit_from).to be_nil expect(event.push_event_payload.commit_to).to eq(newrev) - expect(event.push_event_payload.commit_title).to eq('Initial commit') + expect(event.push_event_payload.commit_title).to eq('Change some files') expect(event.push_event_payload.ref).to eq('master') expect(event.push_event_payload.commit_count).to be > 1 end end + context "with a new non-default branch" do + let(:oldrev) { Gitlab::Git::BLANK_SHA } + let(:branch) { 'fix' } + let(:commit_id) { project.commit(branch).id } + + it 'generates a push event with more than one commit' do + 
execute_service + + expect(event).to be_an_instance_of(PushEvent) + expect(event.project).to eq(project) + expect(event).to be_pushed_action + expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) + expect(event.push_event_payload.commit_from).to be_nil + expect(event.push_event_payload.commit_to).to eq(newrev) + expect(event.push_event_payload.commit_title).to eq('Test file for directories with a leading dot') + expect(event.push_event_payload.ref).to eq('fix') + expect(event.push_event_payload.commit_count).to be > 1 + end + end + context 'removing a branch' do let(:newrev) { Gitlab::Git::BLANK_SHA } |
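
The `Git::BranchHooksService` hunk above reverses the commits collected for a new default branch so they are presented oldest-to-newest, and `event_push_data` now builds the event from `limited_commits.last`, the newest commit; the updated spec accordingly expects the push event's `commit_title` to be the tip commit (`'Change some files'`) rather than `'Initial commit'`. A small sketch of why that ordering matters, using a two-commit list (the titles come from the specs; the SHAs are placeholders):

```ruby
# Sketch: why the ordering change matters for the push event payload.
# Repository listings of the form `repository.commits(newrev, limit: N)` are
# newest-first; the service wants oldest-first so that `commits.last` is the tip.
Commit = Struct.new(:id, :title)

newest_first = [
  Commit.new('ccc', 'Change some files'), # tip of the newly pushed default branch
  Commit.new('aaa', 'Initial commit')
]
oldest_first = newest_first.reverse

# Before the change: `.last` on a newest-first list was the *oldest* commit,
# so the event title came out as "Initial commit".
newest_first.last.title # => "Initial commit"

# After the change: the event is built from the newest commit, as the spec expects.
oldest_first.last.title # => "Change some files"
```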
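
Similarly, the new post-deploy migration and its spec above queue `RecalculateVulnerabilitiesOccurrencesUuid` over `vulnerability_occurrences` in ID-range batches spaced `DELAY_INTERVAL` apart, after stealing any remaining `RemoveDuplicateVulnerabilitiesFindings` jobs. With `BATCH_SIZE` stubbed to 1, the spec expects one job per finding at 2 and 4 minutes. A rough sketch of that delay arithmetic (`schedule_batches` is a hypothetical stand-in, not GitLab's `queue_background_migration_jobs_by_range_at_intervals` helper):

```ruby
# Sketch of the per-batch delay arithmetic behind the scheduled background migration.
DELAY_INTERVAL = 2 * 60 # seconds, matching the migration's 2.minutes

def schedule_batches(ids, batch_size:, delay_interval: DELAY_INTERVAL)
  ids.each_slice(batch_size).map.with_index(1) do |batch, index|
    # Each batch covers an inclusive ID range and is scheduled one interval later
    # than the previous batch.
    { range: [batch.first, batch.last], delay_seconds: index * delay_interval }
  end
end

# With batch_size: 1 (as stubbed in the spec), two findings yield two jobs
# scheduled 2 and 4 minutes out - the delays the spec asserts.
schedule_batches([101, 102], batch_size: 1)
# => [{:range=>[101, 101], :delay_seconds=>120}, {:range=>[102, 102], :delay_seconds=>240}]
```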