22 files changed, 274 insertions, 25 deletions
diff --git a/.gitlab/ci/review.gitlab-ci.yml b/.gitlab/ci/review.gitlab-ci.yml
index d50f6171314..2fafc1f1f50 100644
--- a/.gitlab/ci/review.gitlab-ci.yml
+++ b/.gitlab/ci/review.gitlab-ci.yml
@@ -253,4 +253,4 @@ danger-review:
     - git version
     - node --version
     - yarn install --frozen-lockfile --cache-folder .yarn-cache --prefer-offline
-    - danger --verbose # fail-on-errors=true
+    - danger --fail-on-errors=true --new-comment --remove-previous-comments --verbose
diff --git a/app/models/concerns/bulk_insert_safe.rb b/app/models/concerns/bulk_insert_safe.rb
index 6d75906b21f..0d022418b1a 100644
--- a/app/models/concerns/bulk_insert_safe.rb
+++ b/app/models/concerns/bulk_insert_safe.rb
@@ -1,5 +1,37 @@
 # frozen_string_literal: true
 
+##
+# A mixin for ActiveRecord models that enables callers to insert instances of the
+# target class into the database en-bloc via the [bulk_insert] method.
+#
+# Upon inclusion in the target class, the mixin will perform a number of checks to
+# ensure that the target is eligible for bulk insertions. For instance, it must not
+# use ActiveRecord callbacks that fire between [save]s, since these would not run
+# properly when instances are inserted in bulk.
+#
+# The mixin uses ActiveRecord 6's [InsertAll] type internally for bulk insertions.
+# Unlike [InsertAll], however, it requires you to pass instances of the target type
+# rather than row hashes, since it will run validations prior to insertion.
+#
+# @example
+#
+#   class MyRecord < ApplicationRecord
+#     include BulkInsertSafe # must be included _last_ i.e. after any other concerns
+#   end
+#
+#   # simple
+#   MyRecord.bulk_insert!(items)
+#
+#   # with custom batch size
+#   MyRecord.bulk_insert!(items, batch_size: 100)
+#
+#   # without validations
+#   MyRecord.bulk_insert!(items, validate: false)
+#
+#   # with attribute hash modification
+#   MyRecord.bulk_insert!(items) { |item_attrs| item_attrs['col'] = 42 }
+#
+#
 module BulkInsertSafe
   extend ActiveSupport::Concern
 
@@ -13,7 +45,10 @@ module BulkInsertSafe
     :destroy
   ].freeze
 
+  DEFAULT_BATCH_SIZE = 500
+
   MethodNotAllowedError = Class.new(StandardError)
+  PrimaryKeySetError = Class.new(StandardError)
 
   class_methods do
     def set_callback(name, *args)
@@ -26,8 +61,62 @@ module BulkInsertSafe
       super
     end
 
+    # Inserts the given ActiveRecord [items] to the table mapped to this class via [InsertAll].
+    # Items will be inserted in batches of a given size, where insertion semantics are
+    # "atomic across all batches", i.e. either all items will be inserted or none.
+    #
+    # @param [Boolean] validate          Whether validations should run on [items]
+    # @param [Integer] batch_size        How many items should at most be inserted at once
+    # @param [Proc]    handle_attributes Block that will receive each item attribute hash
+    #                                    prior to insertion for further processing
+    #
+    # Note that this method will throw on the following occasions:
+    # - [PrimaryKeySetError]            when primary keys are set on entities prior to insertion
+    # - [ActiveRecord::RecordInvalid]   on entity validation failures
+    # - [ActiveRecord::RecordNotUnique] on duplicate key errors
+    #
+    # @return true if all items succeeded to be inserted, throws otherwise.
+    #
+    def bulk_insert!(items, validate: true, batch_size: DEFAULT_BATCH_SIZE, &handle_attributes)
+      return true if items.empty?
+
+      _bulk_insert_in_batches(items, batch_size, validate, &handle_attributes)
+
+      true
+    end
+
     private
 
+    def _bulk_insert_in_batches(items, batch_size, validate_items, &handle_attributes)
+      transaction do
+        items.each_slice(batch_size) do |item_batch|
+          attributes = _bulk_insert_item_attributes(item_batch, validate_items, &handle_attributes)
+
+          insert_all!(attributes)
+        end
+      end
+    end
+
+    def _bulk_insert_item_attributes(items, validate_items)
+      items.map do |item|
+        item.validate! if validate_items
+        attributes = item.attributes
+
+        _bulk_insert_reject_primary_key!(attributes, item.class.primary_key)
+
+        yield attributes if block_given?
+
+        attributes
+      end
+    end
+
+    def _bulk_insert_reject_primary_key!(attributes, primary_key)
+      if attributes.delete(primary_key)
+        raise PrimaryKeySetError, "Primary key set: #{primary_key}:#{attributes[primary_key]}\n" \
+          "Bulk-inserts are only supported for rows that don't already have PK set"
+      end
+    end
+
     def _bulk_insert_callback_allowed?(name, args)
       _bulk_insert_whitelisted?(name) || _bulk_insert_saved_from_belongs_to?(name, args)
     end
diff --git a/app/models/merge_request/metrics.rb b/app/models/merge_request/metrics.rb
index 05f8e18a2c1..ba363019c72 100644
--- a/app/models/merge_request/metrics.rb
+++ b/app/models/merge_request/metrics.rb
@@ -6,3 +6,5 @@ class MergeRequest::Metrics < ApplicationRecord
   belongs_to :latest_closed_by, class_name: 'User'
   belongs_to :merged_by, class_name: 'User'
 end
+
+MergeRequest::Metrics.prepend_if_ee('EE::MergeRequest::Metrics')
diff --git a/app/models/merge_request_diff_commit.rb b/app/models/merge_request_diff_commit.rb
index 460b394f067..7310aa4f580 100644
--- a/app/models/merge_request_diff_commit.rb
+++ b/app/models/merge_request_diff_commit.rb
@@ -11,6 +11,7 @@ class MergeRequestDiffCommit < ApplicationRecord
   alias_attribute :id, :sha
 
   def self.create_bulk(merge_request_diff_id, commits)
+    warn 'Deprecated; use `bulk_insert` from `BulkInsertSafe` mixin instead'
     rows = commits.map.with_index do |commit, index|
       # See #parent_ids.
       commit_hash = commit.to_hash.except(:parent_ids)
diff --git a/doc/administration/high_availability/README.md b/doc/administration/high_availability/README.md
index 8f39c6b3c06..4f9e3b1a180 100644
--- a/doc/administration/high_availability/README.md
+++ b/doc/administration/high_availability/README.md
@@ -35,7 +35,7 @@ in which you would typically configure them.
 | Component | Description | Configuration Instructions |
 |-----------|-------------|----------------------------|
 | [Load Balancer(s)](load_balancer.md)[^6] | Handles load balancing for the GitLab nodes where required. | [Load balancer HA configuration](load_balancer.md) |
-| [Consul](../../development/architecture.md#consul)[^3] | Service discovery and health checks/failover | [Consul HA configuration](consul.md) |
+| [Consul](../../development/architecture.md#consul)[^3] | Service discovery and health checks/failover | [Consul HA configuration](consul.md) **(PREMIUM ONLY)** |
 | [PostgreSQL](../../development/architecture.md#postgresql) | Database | [Database HA configuration](database.md) |
 | [PgBouncer](../../development/architecture.md#pgbouncer) | Database Pool Manager | [PgBouncer HA configuration](pgbouncer.md) **(PREMIUM ONLY)** |
 | [Redis](../../development/architecture.md#redis)[^3] with Redis Sentinel | Key/Value store for shared data with HA watcher service | [Redis HA configuration](redis.md) |
diff --git a/doc/administration/high_availability/consul.md b/doc/administration/high_availability/consul.md
index 0ea5e55cc35..6762a81f671 100644
--- a/doc/administration/high_availability/consul.md
+++ b/doc/administration/high_availability/consul.md
@@ -94,7 +94,8 @@ Ideally all nodes will have a `Status` of `alive`.
 ### Restarting the server cluster
 
-**Note**: This section only applies to server agents. It is safe to restart client agents whenever needed.
+NOTE: **Note:**
+This section only applies to server agents. It is safe to restart client agents whenever needed.
 
 If it is necessary to restart the server cluster, it is important to do this in a controlled fashion in order to
 maintain quorum. If quorum is lost, you will need to follow the Consul [outage recovery](#outage-recovery) process
 to recover the cluster.
diff --git a/doc/administration/high_availability/gitlab.md b/doc/administration/high_availability/gitlab.md
index ad00cb8df9f..cef9f9c5761 100644
--- a/doc/administration/high_availability/gitlab.md
+++ b/doc/administration/high_availability/gitlab.md
@@ -8,6 +8,9 @@ NOTE: **Note:**
 There is some additional configuration near the bottom for
 additional GitLab application servers. It's important to read and understand
 these additional steps before proceeding with GitLab installation.
 
+NOTE: **Note:** [Cloud Object Storage service](object_storage.md) with [Gitaly](gitaly.md)
+is recommended over [NFS](nfs.md) wherever possible for improved performance.
+
 1. If necessary, install the NFS client utility packages using the following
    commands:
diff --git a/doc/administration/high_availability/nfs.md b/doc/administration/high_availability/nfs.md
index 0d88191151a..18dbdf8d902 100644
--- a/doc/administration/high_availability/nfs.md
+++ b/doc/administration/high_availability/nfs.md
@@ -12,6 +12,9 @@ performance, especially for actions that read or write to Git repositories. See
 [Filesystem Performance Benchmarking](../operations/filesystem_benchmarking.md)
 for steps to test filesystem performance.
 
+NOTE: **Note:** [Cloud Object Storage service](object_storage.md) with [Gitaly](gitaly.md)
+is recommended over NFS wherever possible for improved performance.
+
 ## NFS Server features
 
 ### Required features
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index dca8d030820..11a672fe1c6 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -114,9 +114,9 @@ Docker-in-Docker works well, and is the recommended configuration, but it is
 not without its own challenges:
 
 - When using docker-in-docker, each job is in a clean environment without the past
-  history. Concurrent jobs work fine because every build gets it's own
+  history. Concurrent jobs work fine because every build gets its own
   instance of Docker engine so they won't conflict with each other. But this
-  also means jobs can be slower because there's no caching of layers.
+  also means that jobs can be slower because there's no caching of layers.
 - By default, Docker 17.09 and higher uses `--storage-driver overlay2` which is
   the recommended storage driver. See [Using the overlayfs driver](#using-the-overlayfs-driver)
   for details.
diff --git a/doc/development/api_graphql_styleguide.md b/doc/development/api_graphql_styleguide.md
index 96b256dc022..6d9d375166b 100644
--- a/doc/development/api_graphql_styleguide.md
+++ b/doc/development/api_graphql_styleguide.md
@@ -261,7 +261,7 @@ the field depending on if the feature has been enabled or not.
 
 GraphQL feature flags use the common
 [GitLab feature flag](../development/feature_flags.md) system, and can be added to a
-field using the `feature_key` property.
+field using the `feature_flag` property.
 
 For example:
 
@@ -269,11 +269,11 @@ For example:
 field :test_field, type: GraphQL::STRING_TYPE,
       null: false,
       description: 'Some test field',
-      feature_key: :some_feature_key
+      feature_flag: :some_feature_flag
 ```
 
 In the above example, the `test_field` field will only be returned if
-the `some_feature_key` feature flag is enabled.
+the `some_feature_flag` feature flag is enabled.
 
 If the feature flag is not enabled, an error will be returned saying the field
 does not exist.
diff --git a/doc/install/aws/index.md b/doc/install/aws/index.md
index 63308f10421..9d712fccb30 100644
--- a/doc/install/aws/index.md
+++ b/doc/install/aws/index.md
@@ -33,6 +33,9 @@ In addition to having a basic familiarity with [AWS](https://docs.aws.amazon.com
 - [To create or upload an SSH key](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)
   to connect to the instance via SSH
 - A domain name for the GitLab instance
+- An SSL/TLS certificate to secure your domain. If you do not already own one, you can provision a free public SSL/TLS certificate through [AWS Certificate Manager](https://aws.amazon.com/certificate-manager/)(ACM) for use with the [Elastic Load Balancer](#load-balancer) we'll create.
+
+NOTE: **Note:** It can take a few hours to validate a certificate provisioned through ACM. To avoid delays later, request your certificate as soon as possible.
 
 ## Architecture
 
@@ -317,6 +320,20 @@ After the Load Balancer is up and running, you can revisit your Security
 Groups to refine the access only through the ELB and any other requirements
 you might have.
 
+### Configure DNS for Load Balancer
+
+On the Route 53 dashboard, click **Hosted zones** in the left navigation bar:
+
+1. Select an existing hosted zone or, if you do not already have one for your domain, click **Create Hosted Zone**, enter your domain name, and click **Create**.
+1. Click **Create Record Set** and provide the following values:
+   1. **Name:** Use the domain name (the default value) or enter a subdomain.
+   1. **Type:** Select **A - IPv4 address**.
+   1. **Alias Target:** Find the **ELB Classic Load Balancers** section and select the classic load balancer we created earlier.
+   1. **Routing Policy:** We'll use **Simple** but you can choose a different policy based on your use case.
+   1. **Evaluate Target Health:** We'll set this to **No** but you can choose to have the load balancer route traffic based on target health.
+   1. Click **Create**.
+1. Update your DNS records with your domain registrar. The steps for doing this vary depending on which registrar you use and is beyond the scope of this guide.
+
 ## Deploying GitLab inside an auto scaling group
 
 We'll use AWS's wizard to deploy GitLab and then SSH into the instance to
diff --git a/doc/user/application_security/container_scanning/index.md b/doc/user/application_security/container_scanning/index.md
index d779e2a9c38..161f16c5100 100644
--- a/doc/user/application_security/container_scanning/index.md
+++ b/doc/user/application_security/container_scanning/index.md
@@ -103,7 +103,7 @@ artifact available. Behind the scenes, the
 [GitLab Klar analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/klar/)
 is used and runs the scans.
 
-## Example
+### Example
 
 The following is a sample `.gitlab-ci.yml` that will build your Docker Image, push it to the
 container registry and run Container Scanning.
@@ -133,7 +133,7 @@ build:
     - docker push $IMAGE
 ```
 
-## Vulnerability Whitelisting
+### Vulnerability Whitelisting
 
 If you want to whitelist specific vulnerabilities, you'll need to:
 
@@ -214,7 +214,7 @@ Container Scanning can be executed on an offline air-gapped GitLab Ultimate inst
 
 1. Host the following Docker images on a [local Docker container registry](../../packages/container_registry/index.md):
    - [arminc/clair-db vulnerabilities database](https://hub.docker.com/r/arminc/clair-db)
-   - [GitLab klar analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/klar)
+   - GitLab klar analyzer: `registry.gitlab.com/gitlab-org/security-products/analyzers/klar`
 1. [Override the container scanning template](#overriding-the-container-scanning-template) in your `.gitlab-ci.yml` file to refer to the Docker images hosted on your local Docker container registry:
 
    ```yaml
diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md
index 07b5da1fd93..bac1b6a5a59 100644
--- a/doc/user/application_security/dependency_scanning/index.md
+++ b/doc/user/application_security/dependency_scanning/index.md
@@ -14,6 +14,7 @@ application is using an external (open source) library which is known to be vuln
 
 If you are using [GitLab CI/CD](../../../ci/README.md), you can analyze your dependencies for known
 vulnerabilities using Dependency Scanning.
+All dependencies are scanned, including the transitive dependencies (also known as nested dependencies).
 
 You can take advantage of Dependency Scanning by either [including the CI job](#configuration)
 in your existing `.gitlab-ci.yml` file or by implicitly using
@@ -153,6 +154,8 @@ using environment variables.
 | `BUNDLER_AUDIT_UPDATE_DISABLED` | Disable automatic updates for the `bundler-audit` analyzer (default: `"false"`). Useful if you're running Dependency Scanning in an offline, air-gapped environment.|
 | `BUNDLER_AUDIT_ADVISORY_DB_URL` | URL of the advisory database used by bundler-audit (default: `https://github.com/rubysec/ruby-advisory-db`). |
 | `BUNDLER_AUDIT_ADVISORY_DB_REF_NAME` | Git ref for the advisory database specified by `BUNDLER_AUDIT_ADVISORY_DB_URL` (default: `master`). |
+| `RETIREJS_JS_ADVISORY_DB` | Path or URL to Retire.js [`jsrepository.json`](https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json) vulnerability data file. |
+| `RETIREJS_NODE_ADVISORY_DB` | Path or URL to Retire.js [`npmrepository.json`](https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/npmrepository.json) vulnerability data file. |
 
 ### Using private Maven repos
 
diff --git a/doc/user/application_security/index.md b/doc/user/application_security/index.md
index 13ea45816b8..805c2d0c140 100644
--- a/doc/user/application_security/index.md
+++ b/doc/user/application_security/index.md
@@ -175,7 +175,9 @@ An approval is optional when a security report:
 - Contains no new vulnerabilities.
 - Contains only new vulnerabilities of `low` or `medium` severity.
 
-### Enabling License Approvals within a project
+## Enabling License Approvals within a project
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13067) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.3.
 
 To enable License Approvals, a [project approval rule](../project/merge_requests/merge_request_approvals.md#multiple-approval-rules-premium)
 must be created with the case-sensitive name `License-Check`. This approval group must be set
diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md
index 7bf61981db9..675fc6c4f2a 100644
--- a/doc/user/application_security/sast/index.md
+++ b/doc/user/application_security/sast/index.md
@@ -278,14 +278,14 @@ The following are Docker image-related variables.
 Some analyzers make it possible to filter out vulnerabilities under a given
 threshold.
 
-| Environment variable | Default value | Description |
-|----------------------|---------------|-------------|
+| Environment variable    | Default value | Description |
+|-------------------------|---------------|-------------|
+| `SAST_EXCLUDED_PATHS` | - | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs, or file or folder paths (for example, `doc,spec` ). Parent directories will also match patterns. |
 | `SAST_BANDIT_EXCLUDED_PATHS` | - | comma-separated list of paths to exclude from scan. Uses Python's [`fnmatch` syntax](https://docs.python.org/2/library/fnmatch.html) |
 | `SAST_BRAKEMAN_LEVEL` | 1 | Ignore Brakeman vulnerabilities under given confidence level. Integer, 1=Low 3=High. |
 | `SAST_FLAWFINDER_LEVEL` | 1 | Ignore Flawfinder vulnerabilities under given risk level. Integer, 0=No risk, 5=High risk. |
 | `SAST_GITLEAKS_ENTROPY_LEVEL` | 8.0 | Minimum entropy for secret detection. Float, 0.0 = low, 8.0 = high. |
 | `SAST_GOSEC_LEVEL` | 0 | Ignore gosec vulnerabilities under given confidence level. Integer, 0=Undefined, 1=Low, 2=Medium, 3=High. |
-| `SAST_EXCLUDED_PATHS` | - | Exclude vulnerabilities from output based on the paths. This is a comma-separated list of patterns. Patterns can be globs, file or folder paths (e.g., `doc,spec` ). Parent directories will also match patterns. |
 
 #### Timeouts
 
diff --git a/doc/user/project/integrations/prometheus.md b/doc/user/project/integrations/prometheus.md
index 475eb4a55b9..43ff5d9895a 100644
--- a/doc/user/project/integrations/prometheus.md
+++ b/doc/user/project/integrations/prometheus.md
@@ -102,16 +102,27 @@ Installing and configuring Prometheus to monitor applications is fairly straight
 #### Configuration in GitLab
 
 The actual configuration of Prometheus integration within GitLab is very simple.
-All you will need is the DNS or IP address of the Prometheus server you'd like
+All you will need is the domain name or IP address of the Prometheus server you'd like
 to integrate with.
 
-1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services)
-1. Click the **Prometheus** service
-1. Provide the base URL of your server, for example `http://prometheus.example.com/`
-1. Click **Save changes**
+1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services).
+1. Click the **Prometheus** service.
+1. Provide the domain name or IP address of your server, for example `http://prometheus.example.com/` or `http://192.0.2.1/`.
+1. Click **Save changes**.
 
 ![Configure Prometheus Service](img/prometheus_service_configuration.png)
 
+#### Thanos configuration in GitLab
+
+You can configure [Thanos](https://thanos.io/) as a drop-in replacement for Prometheus
+with GitLab. You will need the domain name or IP address of the Thanos server you'd like
+to integrate with.
+
+1. Navigate to the [Integrations page](project_services.md#accessing-the-project-services).
+1. Click the **Prometheus** service.
+1. Provide the domain name or IP address of your server, for example `http://thanos.example.com/` or `http://192.0.2.1/`.
+1. Click **Save changes**.
+
 ## Monitoring CI/CD Environments
 
 Once configured, GitLab will attempt to retrieve performance metrics for any
diff --git a/lib/gitlab/auth/o_auth/provider.rb b/lib/gitlab/auth/o_auth/provider.rb
index 3d44c83736a..e7d758353bf 100644
--- a/lib/gitlab/auth/o_auth/provider.rb
+++ b/lib/gitlab/auth/o_auth/provider.rb
@@ -7,7 +7,8 @@ module Gitlab
       LABELS = {
         "github"        => "GitHub",
         "gitlab"        => "GitLab.com",
-        "google_oauth2" => "Google"
+        "google_oauth2" => "Google",
+        "azure_oauth2"  => "Azure AD"
       }.freeze
 
       def self.authentication(user, provider)
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index 91884680738..9ebaedcf252 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
 describe BulkInsertSafe do
   class BulkInsertItem < ApplicationRecord
     include BulkInsertSafe
+
+    validates :name, presence: true
   end
 
   module InheritedUnsafeMethods
@@ -23,7 +25,36 @@ describe BulkInsertSafe do
     end
   end
 
-  it_behaves_like 'a BulkInsertSafe model', BulkInsertItem
+  before(:all) do
+    ActiveRecord::Schema.define do
+      create_table :bulk_insert_items, force: true do |t|
+        t.string :name, null: true
+      end
+    end
+  end
+
+  after(:all) do
+    ActiveRecord::Schema.define do
+      drop_table :bulk_insert_items, force: true
+    end
+  end
+
+  def build_valid_items_for_bulk_insertion
+    Array.new(10) do |n|
+      BulkInsertItem.new(name: "item-#{n}")
+    end
+  end
+
+  def build_invalid_items_for_bulk_insertion
+    Array.new(10) do
+      BulkInsertItem.new # requires `name` to be set
+    end
+  end
+
+  it_behaves_like 'a BulkInsertSafe model', BulkInsertItem do
+    let(:valid_items_for_bulk_insertion) { build_valid_items_for_bulk_insertion }
+    let(:invalid_items_for_bulk_insertion) { build_invalid_items_for_bulk_insertion }
+  end
 
   context 'when inheriting class methods' do
     it 'raises an error when method is not bulk-insert safe' do
@@ -35,4 +66,40 @@ describe BulkInsertSafe do
       expect { BulkInsertItem.include(InheritedSafeMethods) }.not_to raise_error
     end
   end
+
+  context 'primary keys' do
+    it 'raises error if primary keys are set prior to insertion' do
+      items = build_valid_items_for_bulk_insertion
+      items.each_with_index do |item, n|
+        item.id = n
+      end
+
+      expect { BulkInsertItem.bulk_insert!(items) }.to raise_error(subject::PrimaryKeySetError)
+    end
+  end
+
+  describe '.bulk_insert!' do
+    it 'inserts items in the given number of batches' do
+      items = build_valid_items_for_bulk_insertion
+      expect(items.size).to eq(10)
+      expect(BulkInsertItem).to receive(:insert_all!).twice
+
+      BulkInsertItem.bulk_insert!(items, batch_size: 5)
+    end
+
+    it 'rolls back the transaction when any item is invalid' do
+      # second batch is bad
+      all_items = build_valid_items_for_bulk_insertion + build_invalid_items_for_bulk_insertion
+      batch_size = all_items.size / 2
+
+      expect do
+        BulkInsertItem.bulk_insert!(all_items, batch_size: batch_size) rescue nil
+      end.not_to change { BulkInsertItem.count }
+    end
+
+    it 'does nothing and returns true when items are empty' do
+      expect(BulkInsertItem.bulk_insert!([])).to be(true)
+      expect(BulkInsertItem.count).to eq(0)
+    end
+  end
 end
diff --git a/spec/models/label_link_spec.rb b/spec/models/label_link_spec.rb
index 0a5cb5374b0..7a179dcb419 100644
--- a/spec/models/label_link_spec.rb
+++ b/spec/models/label_link_spec.rb
@@ -8,5 +8,8 @@ describe LabelLink do
   it { is_expected.to belong_to(:label) }
   it { is_expected.to belong_to(:target) }
 
-  it_behaves_like 'a BulkInsertSafe model', LabelLink
+  it_behaves_like 'a BulkInsertSafe model', LabelLink do
+    let(:valid_items_for_bulk_insertion) { build_list(:label_link, 10) }
+    let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
+  end
 end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index a296122ae09..8b51c6fae08 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -6,7 +6,10 @@ describe MergeRequestDiffCommit do
   let(:merge_request) { create(:merge_request) }
   let(:project) { merge_request.project }
 
-  it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffCommit
+  it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffCommit do
+    let(:valid_items_for_bulk_insertion) { build_list(:merge_request_diff_commit, 10) }
+    let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
+  end
 
   describe '#to_hash' do
     subject { merge_request.commits.first }
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 6ecbc5bf832..40f7be5dc8f 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -3,7 +3,10 @@
 require 'spec_helper'
 
 describe MergeRequestDiffFile do
-  it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffFile
+  it_behaves_like 'a BulkInsertSafe model', MergeRequestDiffFile do
+    let(:valid_items_for_bulk_insertion) { build_list(:merge_request_diff_file, 10) }
+    let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
+  end
 
   describe '#diff' do
     context 'when diff is not stored' do
diff --git a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
index 78d0945ea63..c6180a5a196 100644
--- a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
@@ -35,4 +35,44 @@ RSpec.shared_examples 'a BulkInsertSafe model' do |klass|
       expect { target_class.belongs_to(:other_record) }.not_to raise_error
     end
   end
+
+  describe '.bulk_insert!' do
+    context 'when all items are valid' do
+      it 'inserts them all' do
+        items = valid_items_for_bulk_insertion
+
+        expect(items).not_to be_empty
+        expect { target_class.bulk_insert!(items) }.to change { target_class.count }.by(items.size)
+      end
+
+      it 'returns true' do
+        items = valid_items_for_bulk_insertion
+
+        expect(items).not_to be_empty
+        expect(target_class.bulk_insert!(items)).to be true
+      end
+    end
+
+    context 'when some items are invalid' do
+      it 'does not insert any of them and raises an error' do
+        items = invalid_items_for_bulk_insertion
+
+        # it is not always possible to create invalid items
+        if items.any?
+          expect { target_class.bulk_insert!(items) }.to raise_error(ActiveRecord::RecordInvalid)
+          expect(target_class.count).to eq(0)
+        end
+      end
+
+      it 'inserts them anyway when bypassing validations' do
+        items = invalid_items_for_bulk_insertion
+
+        # it is not always possible to create invalid items
+        if items.any?
+          expect(target_class.bulk_insert!(items, validate: false)).to be(true)
+          expect(target_class.count).to eq(items.size)
+        end
+      end
+    end
+  end
 end
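For reference, the usage pattern introduced by this diff looks roughly like the sketch below. It only restates the `@example` block and method signature from `BulkInsertSafe`; `MyRecord` and its `name` column are illustrative stand-ins rather than part of the change.

# Hypothetical model used only for illustration; any model that includes the
# concern (e.g. LabelLink, MergeRequestDiffCommit) is used the same way.
class MyRecord < ApplicationRecord
  include BulkInsertSafe # must be included last, i.e. after any other concerns
end

items = Array.new(10) { |n| MyRecord.new(name: "item-#{n}") }

# Validates each item, then inserts the rows in batches via insert_all!,
# atomically across all batches. Raises ActiveRecord::RecordInvalid on a
# validation failure and BulkInsertSafe::PrimaryKeySetError if an item
# already has its primary key set.
MyRecord.bulk_insert!(items, batch_size: 5)

# Skip validations and adjust each attribute hash before insertion.
MyRecord.bulk_insert!(items, validate: false) { |attrs| attrs['name'] = attrs['name'].to_s.upcase }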