Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--  spec/lib/gitlab/asciidoc_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/auth/ip_rate_limiter_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/auth_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb | 125
-rw-r--r--  spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb | 433
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb | 92
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb | 97
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/ci/ansi2html_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/config_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/trace/stream_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/config/loader/yaml_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/diff/lines_unfolder_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb | 82
-rw-r--r--  spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/graphql/copy_field_description_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 1
-rw-r--r--  spec/lib/gitlab/import_export/project.json | 2
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r--  spec/lib/gitlab/issuable_metadata_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/issuable_sorter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/json_cache_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/legacy_github_import/importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/lets_encrypt/client_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/lets_encrypt_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/finder_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/metrics/system_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/performance_bar_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/reference_extractor_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/time_tracking_formatter_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/utils/deep_size_spec.rb | 43
47 files changed, 871 insertions, 1121 deletions
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 0f933ac5464..8f2434acd26 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -56,7 +56,7 @@ module Gitlab
},
'pre' => {
input: '```mypre"><script>alert(3)</script>',
- output: "<div>\n<div>\n<pre lang=\"mypre\">\"&gt;<code></code></pre>\n</div>\n</div>"
+ output: "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n</div>\n</div>"
}
}
@@ -67,6 +67,72 @@ module Gitlab
end
end
+ context 'with fenced block' do
+ it 'highlights syntax' do
+ input = <<~ADOC
+ ```js
+ console.log('hello world')
+ ```
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <div>
+ <pre class="code highlight js-syntax-highlight javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
+
+ context 'with listing block' do
+ it 'highlights syntax' do
+ input = <<~ADOC
+ [source,c++]
+ .class.cpp
+ ----
+ #include <stdio.h>
+
+ for (int i = 0; i < 5; i++) {
+ std::cout<<"*"<<std::endl;
+ }
+ ----
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <div>class.cpp</div>
+ <div>
+ <pre class="code highlight js-syntax-highlight cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
+ <span id="LC2" class="line" lang="cpp"></span>
+ <span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
+ <span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
+ <span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre>
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
+
+ context 'with stem block' do
+ it 'does not apply syntax highlighting' do
+ input = <<~ADOC
+ [stem]
+ ++++
+ \sqrt{4} = 2
+ ++++
+ ADOC
+
+ output = "<div>\n<div>\n\\$ qrt{4} = 2\\$\n</div>\n</div>"
+
+ expect(render(input, context)).to include(output)
+ end
+ end
+
context 'external links' do
it 'adds the `rel` attribute to the link' do
output = render('link:https://google.com[Google]', context)
diff --git a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
new file mode 100644
index 00000000000..8d6bf45ab30
--- /dev/null
+++ b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_caching do
+ let(:ip) { '10.2.2.3' }
+ let(:whitelist) { ['127.0.0.1'] }
+ let(:options) do
+ {
+ enabled: true,
+ ip_whitelist: whitelist,
+ bantime: 1.minute,
+ findtime: 1.minute,
+ maxretry: 2
+ }
+ end
+
+ subject { described_class.new(ip) }
+
+ before do
+ stub_rack_attack_setting(options)
+ end
+
+ after do
+ subject.reset!
+ end
+
+ describe '#register_fail!' do
+ it 'bans after 3 consecutive failures' do
+ expect(subject.banned?).to be_falsey
+
+ 3.times { subject.register_fail! }
+
+ expect(subject.banned?).to be_truthy
+ end
+
+ shared_examples 'whitelisted IPs' do
+ it 'does not ban after max retry limit' do
+ expect(subject.banned?).to be_falsey
+
+ 3.times { subject.register_fail! }
+
+ expect(subject.banned?).to be_falsey
+ end
+ end
+
+ context 'with a whitelisted netmask' do
+ before do
+ options[:ip_whitelist] = ['127.0.0.1', '10.2.2.0/24', 'bad']
+ stub_rack_attack_setting(options)
+ end
+
+ it_behaves_like 'whitelisted IPs'
+ end
+
+ context 'with a whitelisted IP' do
+ before do
+ options[:ip_whitelist] = ['10.2.2.3']
+ stub_rack_attack_setting(options)
+ end
+
+ it_behaves_like 'whitelisted IPs'
+ end
+ end
+end
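
The spec above exercises the limiter purely through its public surface: register_fail! records a failed attempt, banned? flips once more than maxretry failures accumulate, reset! clears the state, and whitelisted addresses (single IPs or CIDR netmasks, with malformed entries ignored) are never banned. A minimal in-memory sketch of that contract (illustrative names only; the real Gitlab::Auth::IpRateLimiter is configured via the Rack::Attack settings stubbed in the spec and backed by the Rails cache, both of which this sketch deliberately omits) could look like this:

require 'ipaddr'

class SimpleIpRateLimiter
  def initialize(ip, whitelist: [], maxretry: 2)
    @ip = ip
    @maxretry = maxretry
    @failures = 0
    # Keep only parseable whitelist entries; strings like 'bad' are skipped,
    # mirroring the 'with a whitelisted netmask' context above.
    @whitelist = whitelist.filter_map do |entry|
      IPAddr.new(entry)
    rescue IPAddr::InvalidAddressError
      nil
    end
  end

  def register_fail!
    @failures += 1 unless whitelisted?
  end

  def banned?
    @failures > @maxretry
  end

  def reset!
    @failures = 0
  end

  private

  # IPAddr#include? matches both single addresses and CIDR ranges.
  def whitelisted?
    @whitelist.any? { |entry| entry.include?(@ip) }
  end
end

With maxretry: 2, three register_fail! calls ban the IP unless it falls inside one of the whitelisted addresses or netmasks, which is the behaviour the shared example 'whitelisted IPs' asserts.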
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 3b5ca7c950c..d9c73cff01e 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -309,6 +309,15 @@ describe Gitlab::Auth do
.to eq(auth_success)
end
+ it 'succeeds when custom login and token are valid' do
+ deploy_token = create(:deploy_token, username: 'deployer', read_registry: false, projects: [project])
+ auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:download_code])
+
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: 'deployer')
+ expect(gl_auth.find_for_git_client('deployer', deploy_token.token, project: project, ip: 'ip'))
+ .to eq(auth_success)
+ end
+
it 'fails when login is not valid' do
expect(gl_auth).to receive(:rate_limit!).with('ip', success: false, login: 'random_login')
expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
diff --git a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
deleted file mode 100644
index 5076996474f..00000000000
--- a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
+++ /dev/null
@@ -1,125 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
-
- let(:base1) { projects.create }
- let(:base1_fork1) { projects.create }
- let(:base1_fork2) { projects.create }
-
- let(:base2) { projects.create }
- let(:base2_fork1) { projects.create }
- let(:base2_fork2) { projects.create }
-
- let(:fork_of_fork) { projects.create }
- let(:fork_of_fork2) { projects.create }
- let(:second_level_fork) { projects.create }
- let(:third_level_fork) { projects.create }
-
- let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
- let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
-
- let!(:forked_project_links) { table(:forked_project_links) }
- let!(:fork_networks) { table(:fork_networks) }
- let!(:fork_network_members) { table(:fork_network_members) }
-
- before do
- # The fork-network relation created for the forked project
- fork_networks.create(id: 1, root_project_id: base1.id)
- fork_network_members.create(project_id: base1.id, fork_network_id: 1)
- fork_networks.create(id: 2, root_project_id: base2.id)
- fork_network_members.create(project_id: base2.id, fork_network_id: 2)
-
- # Normal fork links
- forked_project_links.create(id: 1, forked_from_project_id: base1.id, forked_to_project_id: base1_fork1.id)
- forked_project_links.create(id: 2, forked_from_project_id: base1.id, forked_to_project_id: base1_fork2.id)
- forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id)
- forked_project_links.create(id: 4, forked_from_project_id: base2.id, forked_to_project_id: base2_fork2.id)
-
- # Fork links
- forked_project_links.create(id: 5, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork.id)
- forked_project_links.create(id: 6, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork2.id)
-
- # Forks 3 levels down
- forked_project_links.create(id: 7, forked_from_project_id: fork_of_fork.id, forked_to_project_id: second_level_fork.id)
- forked_project_links.create(id: 8, forked_from_project_id: second_level_fork.id, forked_to_project_id: third_level_fork.id)
-
- migration.perform(1, 8)
- end
-
- it 'creates a memberships for the direct forks' do
- base1_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1_fork1.id)
- base1_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1_fork2.id)
- base2_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2_fork1.id)
- base2_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2_fork2.id)
-
- expect(base1_fork1_membership.forked_from_project_id).to eq(base1.id)
- expect(base1_fork2_membership.forked_from_project_id).to eq(base1.id)
- expect(base2_fork1_membership.forked_from_project_id).to eq(base2.id)
- expect(base2_fork2_membership.forked_from_project_id).to eq(base2.id)
- end
-
- it 'adds the fork network members for forks of forks' do
- fork_of_fork_membership = fork_network_members.find_by(project_id: fork_of_fork.id,
- fork_network_id: fork_network1.id)
- fork_of_fork2_membership = fork_network_members.find_by(project_id: fork_of_fork2.id,
- fork_network_id: fork_network1.id)
- second_level_fork_membership = fork_network_members.find_by(project_id: second_level_fork.id,
- fork_network_id: fork_network1.id)
- third_level_fork_membership = fork_network_members.find_by(project_id: third_level_fork.id,
- fork_network_id: fork_network1.id)
-
- expect(fork_of_fork_membership.forked_from_project_id).to eq(base1_fork1.id)
- expect(fork_of_fork2_membership.forked_from_project_id).to eq(base1_fork1.id)
- expect(second_level_fork_membership.forked_from_project_id).to eq(fork_of_fork.id)
- expect(third_level_fork_membership.forked_from_project_id).to eq(second_level_fork.id)
- end
-
- it 'reschedules itself when there are missing members' do
- allow(migration).to receive(:missing_members?).and_return(true)
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [1, 3])
-
- migration.perform(1, 3)
- end
-
- it 'can be repeated without effect' do
- expect { fork_network_members.count }.not_to change { migration.perform(1, 7) }
- end
-
- it 'knows it is finished for this range' do
- expect(migration.missing_members?(1, 8)).to be_falsy
- end
-
- it 'does not miss members for forks of forks for which the root was deleted' do
- forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: projects.create.id)
- base1.destroy
-
- expect(migration.missing_members?(7, 10)).to be_falsy
- end
-
- context 'with more forks' do
- before do
- forked_project_links.create(id: 9, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 10, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
- end
-
- it 'only processes a single batch of links at a time' do
- expect(fork_network_members.count).to eq(10)
-
- migration.perform(8, 10)
-
- expect(fork_network_members.count).to eq(12)
- end
-
- it 'knows when not all memberships within a batch have been created' do
- expect(migration.missing_members?(8, 10)).to be_truthy
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb b/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
deleted file mode 100644
index 9bae7e53b71..00000000000
--- a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange, :migration, schema: 20170907170235 do
- let!(:redirect_routes) { table(:redirect_routes) }
- let!(:routes) { table(:routes) }
-
- before do
- routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
- routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
- routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
- routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
- routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')
-
- # Valid redirects
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
- redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')
-
- # Conflicting redirects
- redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
- end
-
- # No-op. See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
- it 'NO-OP: does not delete any redirect_routes' do
- expect(redirect_routes.count).to eq(8)
-
- described_class.new.perform(1, 5)
-
- expect(redirect_routes.count).to eq(8)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb b/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
deleted file mode 100644
index 188969951a6..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
+++ /dev/null
@@ -1,433 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event, :migration, schema: 20170608152748 do
- describe '#commit_title' do
- it 'returns nil when there are no commits' do
- expect(described_class.new.commit_title).to be_nil
- end
-
- it 'returns nil when there are commits without commit messages' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ id: '123' }])
-
- expect(event.commit_title).to be_nil
- end
-
- it 'returns the commit message when it is less than 70 characters long' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: 'Hello world' }])
-
- expect(event.commit_title).to eq('Hello world')
- end
-
- it 'returns the first line of a commit message if multiple lines are present' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: "Hello\n\nworld" }])
-
- expect(event.commit_title).to eq('Hello')
- end
-
- it 'truncates the commit to 70 characters when it is too long' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: 'a' * 100 }])
-
- expect(event.commit_title).to eq(('a' * 67) + '...')
- end
- end
-
- describe '#commit_from_sha' do
- it 'returns nil when pushing to a new ref' do
- event = described_class.new
-
- allow(event).to receive(:create?).and_return(true)
-
- expect(event.commit_from_sha).to be_nil
- end
-
- it 'returns the ID of the first commit when pushing to an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:data).and_return(before: '123')
-
- expect(event.commit_from_sha).to eq('123')
- end
- end
-
- describe '#commit_to_sha' do
- it 'returns nil when removing an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:remove?).and_return(true)
-
- expect(event.commit_to_sha).to be_nil
- end
-
- it 'returns the ID of the last commit when pushing to an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:remove?).and_return(false)
- allow(event).to receive(:data).and_return(after: '123')
-
- expect(event.commit_to_sha).to eq('123')
- end
- end
-
- describe '#data' do
- it 'returns the deserialized data' do
- event = described_class.new(data: { before: '123' })
-
- expect(event.data).to eq(before: '123')
- end
-
- it 'returns an empty hash when no data is present' do
- event = described_class.new
-
- expect(event.data).to eq({})
- end
- end
-
- describe '#commits' do
- it 'returns an Array of commits' do
- event = described_class.new(data: { commits: [{ id: '123' }] })
-
- expect(event.commits).to eq([{ id: '123' }])
- end
-
- it 'returns an empty array when no data is present' do
- event = described_class.new
-
- expect(event.commits).to eq([])
- end
- end
-
- describe '#commit_count' do
- it 'returns the number of commits' do
- event = described_class.new(data: { total_commits_count: 2 })
-
- expect(event.commit_count).to eq(2)
- end
-
- it 'returns 0 when no data is present' do
- event = described_class.new
-
- expect(event.commit_count).to eq(0)
- end
- end
-
- describe '#ref' do
- it 'returns the name of the ref' do
- event = described_class.new(data: { ref: 'refs/heads/master' })
-
- expect(event.ref).to eq('refs/heads/master')
- end
- end
-
- describe '#trimmed_ref_name' do
- it 'returns the trimmed ref name for a branch' do
- event = described_class.new(data: { ref: 'refs/heads/master' })
-
- expect(event.trimmed_ref_name).to eq('master')
- end
-
- it 'returns the trimmed ref name for a tag' do
- event = described_class.new(data: { ref: 'refs/tags/v1.2' })
-
- expect(event.trimmed_ref_name).to eq('v1.2')
- end
- end
-
- describe '#create?' do
- it 'returns true when creating a new ref' do
- event = described_class.new(data: { before: described_class::BLANK_REF })
-
- expect(event.create?).to eq(true)
- end
-
- it 'returns false when pushing to an existing ref' do
- event = described_class.new(data: { before: '123' })
-
- expect(event.create?).to eq(false)
- end
- end
-
- describe '#remove?' do
- it 'returns true when removing an existing ref' do
- event = described_class.new(data: { after: described_class::BLANK_REF })
-
- expect(event.remove?).to eq(true)
- end
-
- it 'returns false when pushing to an existing ref' do
- event = described_class.new(data: { after: '123' })
-
- expect(event.remove?).to eq(false)
- end
- end
-
- describe '#push_action' do
- let(:event) { described_class.new }
-
- it 'returns :created when creating a new ref' do
- allow(event).to receive(:create?).and_return(true)
-
- expect(event.push_action).to eq(:created)
- end
-
- it 'returns :removed when removing an existing ref' do
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:remove?).and_return(true)
-
- expect(event.push_action).to eq(:removed)
- end
-
- it 'returns :pushed when pushing to an existing ref' do
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:remove?).and_return(false)
-
- expect(event.push_action).to eq(:pushed)
- end
- end
-
- describe '#ref_type' do
- let(:event) { described_class.new }
-
- it 'returns :tag for a tag' do
- allow(event).to receive(:ref).and_return('refs/tags/1.2')
-
- expect(event.ref_type).to eq(:tag)
- end
-
- it 'returns :branch for a branch' do
- allow(event).to receive(:ref).and_return('refs/heads/1.2')
-
- expect(event.ref_type).to eq(:branch)
- end
- end
-end
-
-##
-# The background migration relies on a temporary table, hence we're migrating
-# to a specific version of the database where said table is still present.
-#
-describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads, :migration, schema: 20170825154015 do
- let(:user_class) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'users'
- end
- end
-
- let(:migration) { described_class.new }
- let(:user_class) { table(:users) }
- let(:author) { build(:user).becomes(user_class).tap(&:save!).becomes(User) }
- let(:namespace) { create(:namespace, owner: author) }
- let(:projects) { table(:projects) }
- let(:project) { projects.create(namespace_id: namespace.id, creator_id: author.id) }
-
- # We can not rely on FactoryBot as the state of Event may change in ways that
- # the background migration does not expect, hence we use the Event class of
- # the migration itself.
- def create_push_event(project, author, data = nil)
- klass = Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event
-
- klass.create!(
- action: klass::PUSHED,
- project_id: project.id,
- author_id: author.id,
- data: data
- )
- end
-
- describe '#perform' do
- it 'returns if data should not be migrated' do
- allow(migration).to receive(:migrate?).and_return(false)
-
- expect(migration).not_to receive(:find_events)
-
- migration.perform(1, 10)
- end
-
- it 'migrates the range of events if data is to be migrated' do
- event1 = create_push_event(project, author, { commits: [] })
- event2 = create_push_event(project, author, { commits: [] })
-
- allow(migration).to receive(:migrate?).and_return(true)
-
- expect(migration).to receive(:process_event).twice
-
- migration.perform(event1.id, event2.id)
- end
- end
-
- describe '#process_event' do
- it 'processes a regular event' do
- event = double(:event, push_event?: false)
-
- expect(migration).to receive(:replicate_event)
- expect(migration).not_to receive(:create_push_event_payload)
-
- migration.process_event(event)
- end
-
- it 'processes a push event' do
- event = double(:event, push_event?: true)
-
- expect(migration).to receive(:replicate_event)
- expect(migration).to receive(:create_push_event_payload)
-
- migration.process_event(event)
- end
-
- it 'handles an error gracefully' do
- event1 = create_push_event(project, author, { commits: [] })
-
- expect(migration).to receive(:replicate_event).and_call_original
- expect(migration).to receive(:create_push_event_payload).and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
-
- migration.process_event(event1)
-
- expect(described_class::EventForMigration.all.count).to eq(0)
- end
- end
-
- describe '#replicate_event' do
- it 'replicates the event to the "events_for_migration" table' do
- event = create_push_event(
- project,
- author,
- data: { commits: [] },
- title: 'bla'
- )
-
- attributes = event
- .attributes.with_indifferent_access.except(:title, :data)
-
- expect(described_class::EventForMigration)
- .to receive(:create!)
- .with(attributes)
-
- migration.replicate_event(event)
- end
- end
-
- describe '#create_push_event_payload' do
- let(:push_data) do
- {
- commits: [],
- ref: 'refs/heads/master',
- before: '156e0e9adc587a383a7eeb5b21ddecb9044768a8',
- after: '0' * 40,
- total_commits_count: 1
- }
- end
-
- let(:event) do
- create_push_event(project, author, push_data)
- end
-
- before do
- # The foreign key in push_event_payloads at this point points to the
- # "events_for_migration" table so we need to make sure a row exists in
- # said table.
- migration.replicate_event(event)
- end
-
- it 'creates a push event payload for an event' do
- payload = migration.create_push_event_payload(event)
-
- expect(PushEventPayload.count).to eq(1)
- expect(payload.valid?).to eq(true)
- end
-
- it 'does not create push event payloads for removed events' do
- allow(event).to receive(:id).and_return(-1)
-
- expect { migration.create_push_event_payload(event) }.to raise_error(ActiveRecord::InvalidForeignKey)
-
- expect(PushEventPayload.count).to eq(0)
- end
-
- it 'encodes and decodes the commit IDs from and to binary data' do
- payload = migration.create_push_event_payload(event)
- packed = migration.pack(push_data[:before])
-
- expect(payload.commit_from).to eq(packed)
- expect(payload.commit_to).to be_nil
- end
- end
-
- describe '#find_events' do
- it 'returns the events for the given ID range' do
- event1 = create_push_event(project, author, { commits: [] })
- event2 = create_push_event(project, author, { commits: [] })
- event3 = create_push_event(project, author, { commits: [] })
- events = migration.find_events(event1.id, event2.id)
-
- expect(events.length).to eq(2)
- expect(events.pluck(:id)).not_to include(event3.id)
- end
- end
-
- describe '#migrate?' do
- it 'returns true when data should be migrated' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::EventForMigration)
- .to receive(:table_exists?).and_return(true)
-
- expect(migration.migrate?).to eq(true)
- end
-
- it 'returns false if the "events" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
-
- it 'returns false if the "push_event_payloads" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
-
- it 'returns false when the "events_for_migration" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::EventForMigration)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
- end
-
- describe '#pack' do
- it 'packs a SHA1 into a 20 byte binary string' do
- packed = migration.pack('156e0e9adc587a383a7eeb5b21ddecb9044768a8')
-
- expect(packed.bytesize).to eq(20)
- end
-
- it 'returns nil if the input value is nil' do
- expect(migration.pack(nil)).to be_nil
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb b/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb
deleted file mode 100644
index 89b56906ed0..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:stages) { table(:ci_stages) }
- let(:jobs) { table(:ci_builds) }
-
- let(:statuses) do
- {
- created: 0,
- pending: 1,
- running: 2,
- success: 3,
- failed: 4,
- canceled: 5,
- skipped: 6,
- manual: 7
- }
- end
-
- before do
- projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
- pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
- stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
- stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil)
- end
-
- context 'when stage status is known' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:running]
- expect(stages.second.status).to eq statuses[:failed]
- end
- end
-
- context 'when stage status is not known' do
- it 'sets a skipped stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:skipped]
- expect(stages.second.status).to eq statuses[:skipped]
- end
- end
-
- context 'when stage status includes status of a retried job' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true)
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:canceled]
- expect(stages.second.status).to eq statuses[:success]
- end
- end
-
- context 'when some job in the stage is blocked / manual' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed')
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:manual]
- expect(stages.second.status).to eq statuses[:success]
- end
- end
-
- def create_job(project:, pipeline:, stage:, status:, **opts)
- stages = { test: 1, build: 2, deploy: 3 }
-
- jobs.create!(project_id: project, commit_id: pipeline,
- stage_idx: stages[stage.to_sym], stage: stage,
- status: status, **opts)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb b/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
deleted file mode 100644
index ea8bdd48e72..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder, :delete do
- let(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:logger).and_return(Logger.new(nil))
- end
-
- describe '#perform' do
- it 'renames the path of system-uploads' do
- upload = create(:upload, model: create(:project), path: 'uploads/system/project/avatar.jpg')
-
- migration.perform('uploads/system/', 'uploads/-/system/')
-
- expect(upload.reload.path).to eq('uploads/-/system/project/avatar.jpg')
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb b/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb
deleted file mode 100644
index 593486fc56c..00000000000
--- a/spec/lib/gitlab/background_migration/move_personal_snippet_files_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MovePersonalSnippetFiles do
- let(:test_dir) { File.join(Rails.root, 'tmp', 'tests', 'move_snippet_files_test') }
- let(:old_uploads_dir) { File.join('uploads', 'system', 'personal_snippet') }
- let(:new_uploads_dir) { File.join('uploads', '-', 'system', 'personal_snippet') }
- let(:snippet) do
- snippet = create(:personal_snippet)
- create_upload_for_snippet(snippet)
- snippet.update!(description: markdown_linking_file(snippet))
- snippet
- end
-
- let(:migration) { described_class.new }
-
- before do
- allow(migration).to receive(:base_directory) { test_dir }
- end
-
- describe '#perform' do
- it 'moves the file on the disk' do
- expected_path = File.join(test_dir, new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
-
- migration.perform(old_uploads_dir, new_uploads_dir)
-
- expect(File.exist?(expected_path)).to be_truthy
- end
-
- it 'updates the markdown of the snippet' do
- expected_path = File.join(new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
- expected_markdown = "[an upload](#{expected_path})"
-
- migration.perform(old_uploads_dir, new_uploads_dir)
-
- expect(snippet.reload.description).to eq(expected_markdown)
- end
-
- it 'updates the markdown of notes' do
- expected_path = File.join(new_uploads_dir, snippet.id.to_s, "secret#{snippet.id}", 'upload.txt')
- expected_markdown = "with [an upload](#{expected_path})"
-
- note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown_linking_file(snippet)}")
-
- migration.perform(old_uploads_dir, new_uploads_dir)
-
- expect(note.reload.note).to eq(expected_markdown)
- end
- end
-
- def create_upload_for_snippet(snippet)
- snippet_path = path_for_file_in_snippet(snippet)
- path = File.join(old_uploads_dir, snippet.id.to_s, snippet_path)
- absolute_path = File.join(test_dir, path)
-
- FileUtils.mkdir_p(File.dirname(absolute_path))
- FileUtils.touch(absolute_path)
-
- create(:upload, model: snippet, path: snippet_path, uploader: PersonalFileUploader)
- end
-
- def path_for_file_in_snippet(snippet)
- secret = "secret#{snippet.id}"
- filename = 'upload.txt'
-
- File.join(secret, filename)
- end
-
- def markdown_linking_file(snippet)
- path = File.join(old_uploads_dir, snippet.id.to_s, path_for_file_in_snippet(snippet))
- "[an upload](#{path})"
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb b/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
deleted file mode 100644
index dfbf1bb681a..00000000000
--- a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange, :migration, schema: 20170921101004 do
- let!(:identities) { table(:identities) }
-
- before do
- # LDAP identities
- (1..4).each do |i|
- identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
- end
-
- # Non-LDAP identity
- identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)
-
- # Another LDAP identity
- identities.create!(id: 6, provider: 'ldapmain', extern_uid: " uid = foo 6, ou = People, dc = example, dc = com ", user_id: 6)
- end
-
- it 'normalizes the LDAP identities in the range' do
- described_class.new.perform(1, 3)
- expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
- expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
- expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
- expect(identities.find(4).extern_uid).to eq(" uid = foo 4, ou = People, dc = example, dc = com ")
- expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
- expect(identities.find(6).extern_uid).to eq(" uid = foo 6, ou = People, dc = example, dc = com ")
-
- described_class.new.perform(4, 6)
- expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
- expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
- expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
- expect(identities.find(4).extern_uid).to eq("uid=foo 4,ou=people,dc=example,dc=com")
- expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
- expect(identities.find(6).extern_uid).to eq("uid=foo 6,ou=people,dc=example,dc=com")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
deleted file mode 100644
index 0e73c8c59c9..00000000000
--- a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
- let(:base1) { projects.create }
-
- let(:base2) { projects.create }
- let(:base2_fork1) { projects.create }
-
- let!(:forked_project_links) { table(:forked_project_links) }
- let!(:fork_networks) { table(:fork_networks) }
- let!(:fork_network_members) { table(:fork_network_members) }
-
- let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
- let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
-
- before do
- # A normal fork link
- forked_project_links.create(id: 1,
- forked_from_project_id: base1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 2,
- forked_from_project_id: base1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 3,
- forked_from_project_id: base2.id,
- forked_to_project_id: base2_fork1.id)
-
- # create a fork of a fork
- forked_project_links.create(id: 4,
- forked_from_project_id: base2_fork1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 5,
- forked_from_project_id: projects.create.id,
- forked_to_project_id: projects.create.id)
-
- # Stub out the calls to the other migrations
- allow(BackgroundMigrationWorker).to receive(:perform_in)
-
- migration.perform(1, 3)
- end
-
- it 'creates the fork network' do
- expect(fork_network1).not_to be_nil
- expect(fork_network2).not_to be_nil
- end
-
- it 'does not create a fork network for a fork-of-fork' do
- # perfrom the entire batch
- migration.perform(1, 5)
-
- expect(fork_networks.find_by(root_project_id: base2_fork1.id)).to be_nil
- end
-
- it 'creates memberships for the root of fork networks' do
- base1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1.id)
- base2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2.id)
-
- expect(base1_membership).not_to be_nil
- expect(base2_membership).not_to be_nil
- end
-
- it 'creates a fork network for the fork of which the source was deleted' do
- fork = projects.create
- forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
-
- migration.perform(5, 8)
-
- expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
- expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
- expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
- end
-
- it 'schedules a job for inserting memberships for forks-of-forks' do
- delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in).with(delay, "CreateForkNetworkMembershipsRange", [1, 3])
-
- migration.perform(1, 3)
- end
-
- it 'only processes a single batch of links at a time' do
- expect(fork_networks.count).to eq(2)
-
- migration.perform(3, 5)
-
- expect(fork_networks.count).to eq(3)
- end
-
- it 'can be repeated without effect' do
- expect { migration.perform(1, 3) }.not_to change { fork_network_members.count }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
deleted file mode 100644
index 0cb753c5853..00000000000
--- a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
- let(:projects_table) { table(:projects) }
- let(:merge_requests_table) { table(:merge_requests) }
- let(:merge_request_diffs_table) { table(:merge_request_diffs) }
-
- let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
-
- def create_mr!(name, diffs: 0)
- merge_request =
- merge_requests_table.create!(target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: name,
- title: name)
-
- diffs.times do
- merge_request_diffs_table.create!(merge_request_id: merge_request.id)
- end
-
- merge_request
- end
-
- def diffs_for(merge_request)
- merge_request_diffs_table.where(merge_request_id: merge_request.id)
- end
-
- describe '#perform' do
- it 'ignores MRs without diffs' do
- merge_request_without_diff = create_mr!('without_diff')
- mr_id = merge_request_without_diff.id
-
- expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
-
- expect { subject.perform(mr_id, mr_id) }
- .not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
- end
-
- it 'ignores MRs that have a diff ID already set' do
- merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
- diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
- mr_id = merge_request_with_multiple_diffs.id
-
- merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
-
- expect { subject.perform(mr_id, mr_id) }
- .not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
- end
-
- it 'migrates multiple MR diffs to the correct values' do
- merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
-
- subject.perform(merge_requests.first.id, merge_requests.last.id)
-
- merge_requests.each do |merge_request|
- expect(merge_request.reload.latest_merge_request_diff_id)
- .to eq(diffs_for(merge_request).maximum(:id))
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index aa4e358b148..3d57ce431ab 100644
--- a/spec/lib/gitlab/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -141,11 +141,11 @@ describe Gitlab::Ci::Ansi2html do
end
it "replaces newlines with line break tags" do
- expect(convert_html("\n")).to eq('<span class=""></span><br/><span class=""></span>')
+ expect(convert_html("\n")).to eq('<span class=""><br/><span class=""></span></span>')
end
it "groups carriage returns with newlines" do
- expect(convert_html("\r\n")).to eq('<span class=""></span><br/><span class=""></span>')
+ expect(convert_html("\r\n")).to eq('<span class=""><br/><span class=""></span></span>')
end
describe "incremental update" do
@@ -193,7 +193,7 @@ describe Gitlab::Ci::Ansi2html do
let(:pre_text) { "Hello\r" }
let(:pre_html) { "<span class=\"\">Hello\r</span>" }
let(:text) { "\nWorld" }
- let(:html) { "<span class=\"\"></span><br/><span class=\"\">World</span>" }
+ let(:html) { "<span class=\"\"><br/><span class=\"\">World</span></span>" }
it_behaves_like 'stateable converter'
end
@@ -232,7 +232,7 @@ describe Gitlab::Ci::Ansi2html do
it 'prints light red' do
text = "#{section_start}\e[91mHello\e[0m\n#{section_end}"
header = %{<span class="term-fg-l-red section js-section-header section-header js-s-#{class_name(section_name)}">Hello</span>}
- line_break = %{<span class="section js-section-header section-header js-s-#{class_name(section_name)}"></span><br/>}
+ line_break = %{<span class="section js-section-header section-header js-s-#{class_name(section_name)}"><br/></span>}
line = %{<span class="section line s_#{class_name(section_name)}"></span>}
empty_line = %{<span class="section js-s-#{class_name(section_name)}"></span>}
html = "#{section_start_html}#{header}#{line_break}#{line}#{empty_line}#{section_end_html}"
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
index c5bc81a2b9e..d88a2097ba2 100644
--- a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -17,15 +17,12 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'build has a deployment' do
- let!(:deployment) { create(:deployment, deployable: build) }
+ let!(:deployment) { create(:deployment, deployable: build, cluster: cluster) }
+ let(:cluster) { nil }
context 'and a cluster to deploy to' do
let(:cluster) { create(:cluster, :group) }
- before do
- allow(build.deployment).to receive(:cluster).and_return(cluster)
- end
-
it { is_expected.to be_truthy }
context 'and the cluster is not managed' do
@@ -48,28 +45,21 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'and no cluster to deploy to' do
- before do
- expect(deployment.cluster).to be_nil
- end
-
it { is_expected.to be_falsey }
end
end
end
describe '#complete!' do
- let!(:deployment) { create(:deployment, deployable: build) }
+ let!(:deployment) { create(:deployment, deployable: build, cluster: cluster) }
let(:service) { double(execute: true) }
+ let(:cluster) { nil }
subject { described_class.new(build).complete! }
context 'completion is required' do
let(:cluster) { create(:cluster, :group) }
- before do
- allow(build.deployment).to receive(:cluster).and_return(cluster)
- end
-
it 'creates a kubernetes namespace' do
expect(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService)
.to receive(:new)
@@ -83,10 +73,6 @@ describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'completion is not required' do
- before do
- expect(deployment.cluster).to be_nil
- end
-
it 'does not create a namespace' do
expect(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).not_to receive(:new)
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 7f336ee853e..4e8bff3d738 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -90,6 +90,27 @@ describe Gitlab::Ci::Config do
end
end
+ context 'when yml is too big' do
+ let(:yml) do
+ <<~YAML
+ --- &1
+ - hi
+ - *1
+ YAML
+ end
+
+ describe '.new' do
+ it 'raises error' do
+ expect(Gitlab::Sentry).to receive(:track_exception)
+
+ expect { config }.to raise_error(
+ described_class::ConfigError,
+ /The parsed YAML is too big/
+ )
+ end
+ end
+ end
+
context 'when config logic is incorrect' do
let(:yml) { 'before_script: "ls"' }
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index 0d03eef99c8..35250632e86 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -65,9 +65,9 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
result = stream.html
expect(result).to eq(
- "<span class=\"\">ヾ(´༎ຶД༎ຶ`)ノ</span><br/><span class=\"\"></span>"\
- "<span class=\"term-fg-green\">許功蓋</span><span class=\"\"></span><br/>"\
- "<span class=\"\"></span>")
+ "<span class=\"\">ヾ(´༎ຶД༎ຶ`)ノ<br/><span class=\"\"></span></span>"\
+ "<span class=\"term-fg-green\">許功蓋</span><span class=\"\"><br/>"\
+ "<span class=\"\"></span></span>")
expect(result.encoding).to eq(Encoding.default_external)
end
end
@@ -306,8 +306,8 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
shared_examples_for 'htmls' do
it "returns html" do
expect(stream.html).to eq(
- "<span class=\"\">12</span><br/><span class=\"\">34</span><br/>"\
- "<span class=\"\">56</span>")
+ "<span class=\"\">12<br/><span class=\"\">34<br/>"\
+ "<span class=\"\">56</span></span></span>")
end
it "returns html for last line only" do
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
new file mode 100644
index 00000000000..4d8edfeac80
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_batch_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cleanup::OrphanJobArtifactFilesBatch do
+ let(:batch_size) { 10 }
+ let(:dry_run) { true }
+
+ subject(:batch) { described_class.new(batch_size: batch_size, dry_run: dry_run) }
+
+ context 'no dry run' do
+ let(:dry_run) { false }
+
+ it 'deletes only orphan job artifacts from disk' do
+ job_artifact = create(:ci_job_artifact, :archive)
+ orphan_artifact = create(:ci_job_artifact, :archive)
+ batch << artifact_path(job_artifact)
+ batch << artifact_path(orphan_artifact)
+ orphan_artifact.delete
+
+ batch.clean!
+
+ expect(batch.artifact_files.count).to eq(2)
+ expect(batch.lost_and_found.count).to eq(1)
+ expect(batch.lost_and_found.first.artifact_id).to eq(orphan_artifact.id)
+ end
+
+ it 'does not mix up job ID and artifact ID' do
+ # take maximum ID of both tables to avoid any collision
+ max_id = [Ci::Build.maximum(:id), Ci::JobArtifact.maximum(:id)].compact.max.to_i
+ job_a = create(:ci_build, id: max_id + 1)
+ job_b = create(:ci_build, id: max_id + 2)
+ # reuse the build IDs for the job artifact IDs, but swap them
+ job_artifact_b = create(:ci_job_artifact, :archive, job: job_b, id: max_id + 1)
+ job_artifact_a = create(:ci_job_artifact, :archive, job: job_a, id: max_id + 2)
+
+ batch << artifact_path(job_artifact_a)
+ batch << artifact_path(job_artifact_b)
+
+ job_artifact_b.delete
+
+ batch.clean!
+
+ expect(File.exist?(job_artifact_a.file.path)).to be_truthy
+ expect(File.exist?(job_artifact_b.file.path)).to be_falsey
+ end
+ end
+
+ context 'with dry run' do
+ it 'does not remove files' do
+ job_artifact = create(:ci_job_artifact, :archive)
+ batch << job_artifact.file.path
+ job_artifact.delete
+
+ expect(batch).not_to receive(:remove_file!)
+
+ batch.clean!
+
+ expect(File.exist?(job_artifact.file.path)).to be_truthy
+ end
+ end
+
+ def artifact_path(job_artifact)
+ Pathname.new(job_artifact.file.path).parent.to_s
+ end
+end
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
new file mode 100644
index 00000000000..974cc2c4660
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_files_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cleanup::OrphanJobArtifactFiles do
+ let(:null_logger) { Logger.new('/dev/null') }
+ subject(:cleanup) { described_class.new(logger: null_logger) }
+
+ before do
+ allow(null_logger).to receive(:info)
+ end
+
+ it 'passes on dry_run' do
+ expect(Gitlab::Cleanup::OrphanJobArtifactFilesBatch)
+ .to receive(:new)
+ .with(dry_run: false, batch_size: anything, logger: anything)
+ .at_least(:once)
+ .and_call_original
+
+ described_class.new(dry_run: false).run!
+ end
+
+ it 'errors when invalid niceness is given' do
+ cleanup = described_class.new(logger: null_logger, niceness: 'FooBar')
+
+ expect(null_logger).to receive(:error).with(/FooBar/)
+
+ cleanup.run!
+ end
+
+ it 'finds artifacts on disk' do
+ artifact = create(:ci_job_artifact, :archive)
+
+ expect(cleanup).to receive(:find_artifacts).and_yield(artifact.file.path)
+ cleanup.run!
+ end
+
+ it 'stops when limit is reached' do
+ cleanup = described_class.new(limit: 1)
+
+ mock_artifacts_found(cleanup, 'tmp/foo/bar/1', 'tmp/foo/bar/2')
+
+ cleanup.run!
+
+ expect(cleanup.total_found).to eq(1)
+ end
+
+ it 'cleans even if batch is not full' do
+ mock_artifacts_found(cleanup, 'tmp/foo/bar/1')
+
+ expect(cleanup).to receive(:clean_batch!).and_call_original
+ cleanup.run!
+ end
+
+ it 'cleans in batches' do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ mock_artifacts_found(cleanup, 'tmp/foo/bar/1', 'tmp/foo/bar/2', 'tmp/foo/bar/3')
+
+ expect(cleanup).to receive(:clean_batch!).twice.and_call_original
+ cleanup.run!
+ end
+
+ def mock_artifacts_found(cleanup, *files)
+ mock = allow(cleanup).to receive(:find_artifacts)
+
+ files.each { |file| mock.and_yield(file) }
+ end
+end
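
This spec pins down the batching behaviour rather than the file handling: paths streamed from find_artifacts are grouped into BATCH_SIZE chunks, each full chunk is cleaned immediately, a trailing partial chunk is cleaned at the end, and an optional limit stops the scan early. A rough sketch of that loop, with assumed class and method names rather than the production Gitlab::Cleanup::OrphanJobArtifactFiles internals, might be:

class OrphanArtifactSweep
  DEFAULT_BATCH_SIZE = 500

  attr_reader :total_found, :cleaned_batches

  # `finder` is anything responding to #each that yields artifact paths found
  # on disk; an Array is enough for illustration.
  def initialize(finder, batch_size: DEFAULT_BATCH_SIZE, limit: nil)
    @finder = finder
    @batch_size = batch_size
    @limit = limit
    @batch = []
    @total_found = 0
    @cleaned_batches = 0
  end

  def run!
    @finder.each do |path|
      break if @limit && @total_found >= @limit

      @batch << path
      @total_found += 1
      clean_batch! if @batch.size >= @batch_size
    end

    # Clean whatever is left over, mirroring 'cleans even if batch is not full'.
    clean_batch! unless @batch.empty?
  end

  private

  def clean_batch!
    # The real task cross-checks each path against the job artifact records
    # before deleting anything; here the batch is simply counted and dropped.
    @cleaned_batches += 1
    @batch.clear
  end
end

Three paths with a batch size of two produce two clean passes (one full batch plus the trailing partial one), matching the 'cleans in batches' example.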
diff --git a/spec/lib/gitlab/config/loader/yaml_spec.rb b/spec/lib/gitlab/config/loader/yaml_spec.rb
index 44c9a3896a8..0affeb01f6a 100644
--- a/spec/lib/gitlab/config/loader/yaml_spec.rb
+++ b/spec/lib/gitlab/config/loader/yaml_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::Config::Loader::Yaml do
describe '#valid?' do
it 'returns true' do
- expect(loader.valid?).to be true
+ expect(loader).to be_valid
end
end
@@ -24,7 +24,7 @@ describe Gitlab::Config::Loader::Yaml do
describe '#valid?' do
it 'returns false' do
- expect(loader.valid?).to be false
+ expect(loader).not_to be_valid
end
end
@@ -43,7 +43,10 @@ describe Gitlab::Config::Loader::Yaml do
describe '#initialize' do
it 'raises FormatError' do
- expect { loader }.to raise_error(Gitlab::Config::Loader::FormatError, 'Unknown alias: bad_alias')
+ expect { loader }.to raise_error(
+ Gitlab::Config::Loader::FormatError,
+ 'Unknown alias: bad_alias'
+ )
end
end
end
@@ -53,7 +56,68 @@ describe Gitlab::Config::Loader::Yaml do
describe '#valid?' do
it 'returns false' do
- expect(loader.valid?).to be false
+ expect(loader).not_to be_valid
+ end
+ end
+ end
+
+ # Prevent Billion Laughs attack: https://gitlab.com/gitlab-org/gitlab-ce/issues/56018
+ context 'when yaml size is too large' do
+ let(:yml) do
+ <<~YAML
+ a: &a ["lol","lol","lol","lol","lol","lol","lol","lol","lol"]
+ b: &b [*a,*a,*a,*a,*a,*a,*a,*a,*a]
+ c: &c [*b,*b,*b,*b,*b,*b,*b,*b,*b]
+ d: &d [*c,*c,*c,*c,*c,*c,*c,*c,*c]
+ e: &e [*d,*d,*d,*d,*d,*d,*d,*d,*d]
+ f: &f [*e,*e,*e,*e,*e,*e,*e,*e,*e]
+ g: &g [*f,*f,*f,*f,*f,*f,*f,*f,*f]
+ h: &h [*g,*g,*g,*g,*g,*g,*g,*g,*g]
+ i: &i [*h,*h,*h,*h,*h,*h,*h,*h,*h]
+ YAML
+ end
+
+ describe '#valid?' do
+ it 'returns false' do
+ expect(loader).not_to be_valid
+ end
+
+ it 'returns true if "ci_yaml_limit_size" feature flag is disabled' do
+ stub_feature_flags(ci_yaml_limit_size: false)
+
+ expect(loader).to be_valid
+ end
+ end
+
+ describe '#load!' do
+ it 'raises FormatError' do
+ expect { loader.load! }.to raise_error(
+ Gitlab::Config::Loader::FormatError,
+ 'The parsed YAML is too big'
+ )
+ end
+ end
+ end
+
+ # Prevent Billion Laughs attack: https://gitlab.com/gitlab-org/gitlab-ce/issues/56018
+ context 'when yaml has cyclic data structure' do
+ let(:yml) do
+ <<~YAML
+ --- &1
+ - hi
+ - *1
+ YAML
+ end
+
+ describe '#valid?' do
+ it 'returns false' do
+ expect(loader.valid?).to be(false)
+ end
+ end
+
+ describe '#load!' do
+ it 'raises FormatError' do
+ expect { loader.load! }.to raise_error(Gitlab::Config::Loader::FormatError, 'The parsed YAML is too big')
end
end
end
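
These loader specs only pin down observable behaviour: oversized or cyclic YAML makes valid? return false and load! raise Gitlab::Config::Loader::FormatError with 'The parsed YAML is too big', unless the ci_yaml_limit_size feature flag is disabled. One way to bound the expanded document, shown here as a sketch with an assumed class name and limits rather than GitLab's actual implementation (the feature-flag escape hatch is left out), is to walk the parsed structure with node-count and depth caps:

require 'yaml'

class BoundedYamlLoader
  FormatError = Class.new(StandardError)

  def initialize(yml, max_nodes: 10_000, max_depth: 100)
    @yml = yml
    @max_nodes = max_nodes
    @max_depth = max_depth
  end

  def valid?
    load!
    true
  rescue FormatError, Psych::Exception
    false
  end

  def load!
    data = YAML.safe_load(@yml, aliases: true)
    raise FormatError, 'The parsed YAML is too big' unless small_enough?(data)

    data
  end

  private

  # Walk the expanded object graph, bailing out as soon as either limit is
  # exceeded; the depth cap also terminates the walk on cyclic structures.
  def small_enough?(node, depth = 0, counter = [0])
    return false if depth > @max_depth
    return false if (counter[0] += 1) > @max_nodes

    children =
      case node
      when Hash  then node.values
      when Array then node
      else []
      end

    children.all? { |child| small_enough?(child, depth + 1, counter) }
  end
end

For the nested-anchor payload above, the shared aliases expand into far more nodes than the cap allows, so the counter trips almost immediately; for the self-referencing sequence, the depth cap stops the walk. Both cases surface as the FormatError the specs expect.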
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 3cf3d032bf4..7409572288c 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -583,6 +583,24 @@ describe Gitlab::Database::MigrationHelpers do
model.add_column_with_default(:projects, :foo, :integer, default: 10, limit: 8)
end
end
+
+ it 'adds a column with an array default value for a jsonb type' do
+ create(:project)
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:transaction).and_yield
+ expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '[{"foo":"json"}]').and_call_original
+
+ model.add_column_with_default(:projects, :foo, :jsonb, default: [{ foo: "json" }])
+ end
+
+ it 'adds a column with an object default value for a jsonb type' do
+ create(:project)
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:transaction).and_yield
+ expect(model).to receive(:update_column_in_batches).with(:projects, :foo, '{"foo":"json"}').and_call_original
+
+ model.add_column_with_default(:projects, :foo, :jsonb, default: { foo: "json" })
+ end
end
context 'inside a transaction' do
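
The two new examples expect update_column_in_batches to receive the jsonb default already serialized as a JSON string ('[{"foo":"json"}]' and '{"foo":"json"}'). A hedged sketch of the serialization step those expectations imply (the helper name is illustrative):

    require 'json'

    # Sketch of the behaviour the examples assert: hashes and arrays used as
    # defaults for jsonb columns are serialized to JSON text before being
    # written in batches, while other defaults pass through untouched.
    def default_value_for(column_type, default)
      if column_type == :jsonb && (default.is_a?(Hash) || default.is_a?(Array))
        default.to_json
      else
        default
      end
    end

    default_value_for(:jsonb, [{ foo: "json" }]) # => "[{\"foo\":\"json\"}]"
    default_value_for(:jsonb, { foo: "json" })   # => "{\"foo\":\"json\"}"
    default_value_for(:integer, 10)              # => 10
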
diff --git a/spec/lib/gitlab/diff/lines_unfolder_spec.rb b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
index 8a470e12d04..3134ff3d817 100644
--- a/spec/lib/gitlab/diff/lines_unfolder_spec.rb
+++ b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
@@ -842,4 +842,37 @@ describe Gitlab::Diff::LinesUnfolder do
end
end
end
+
+ context 'positioned on an image' do
+ let(:position) do
+ Gitlab::Diff::Position.new(
+ base_sha: '1c59dfa64afbea8c721bb09a06a9d326c952ea19',
+ start_sha: '1c59dfa64afbea8c721bb09a06a9d326c952ea19',
+ head_sha: '1487062132228de836236c522fe52fed4980a46c',
+ old_path: 'image.jpg',
+ new_path: 'image.jpg',
+ position_type: 'image'
+ )
+ end
+
+ before do
+ allow(old_blob).to receive(:binary?).and_return(binary?)
+ end
+
+ context 'diff file is not text' do
+ let(:binary?) { true }
+
+ it 'returns nil' do
+ expect(subject.unfolded_diff_lines).to be_nil
+ end
+ end
+
+ context 'diff file is text' do
+ let(:binary?) { false }
+
+ it 'returns nil' do
+ expect(subject.unfolded_diff_lines).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index e72fb9c6fbc..cceeae8afe6 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2038,24 +2038,24 @@ describe Gitlab::Git::Repository, :seed_helper do
end
describe '#clean_stale_repository_files' do
- let(:worktree_path) { File.join(repository_path, 'worktrees', 'delete-me') }
+ let(:worktree_id) { 'rebase-1' }
+ let(:gitlab_worktree_path) { File.join(repository_path, 'gitlab-worktree', worktree_id) }
+ let(:admin_dir) { File.join(repository_path, 'worktrees') }
it 'cleans up the files' do
- create_worktree = %W[git -C #{repository_path} worktree add --detach #{worktree_path} master]
+ create_worktree = %W[git -C #{repository_path} worktree add --detach #{gitlab_worktree_path} master]
raise 'preparation failed' unless system(*create_worktree, err: '/dev/null')
- FileUtils.touch(worktree_path, mtime: Time.now - 8.hours)
+ FileUtils.touch(gitlab_worktree_path, mtime: Time.now - 8.hours)
# git rev-list --all will fail in git 2.16 if HEAD is pointing to a non-existent object,
# but the HEAD must be 40 characters long or git will ignore it.
- File.write(File.join(worktree_path, 'HEAD'), Gitlab::Git::BLANK_SHA)
-
- # git 2.16 fails with "fatal: bad object HEAD"
- expect(rev_list_all).to be false
+ File.write(File.join(admin_dir, worktree_id, 'HEAD'), Gitlab::Git::BLANK_SHA)
+ expect(rev_list_all).to be(false)
repository.clean_stale_repository_files
- expect(rev_list_all).to be true
- expect(File.exist?(worktree_path)).to be_falsey
+ expect(rev_list_all).to be(true)
+ expect(File.exist?(gitlab_worktree_path)).to be_falsey
end
def rev_list_all
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 6d6107ca3e7..ba6abba4e61 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -223,6 +223,19 @@ describe Gitlab::GitalyClient::CommitService do
end
context 'when caching of the ref name is enabled' do
+ it 'caches negative entries' do
+ expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commit).once.and_return(double(commit: nil))
+
+ commit = nil
+ 2.times do
+ ::Gitlab::GitalyClient.allow_ref_name_caching do
+ commit = described_class.new(repository).find_commit('master')
+ end
+ end
+
+ expect(commit).to eq(nil)
+ end
+
it 'returns a cached commit' do
expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:find_commit).once.and_return(double(commit: commit_dbl))
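
The 'caches negative entries' example verifies that a nil result from find_commit is cached as well, so the second lookup inside allow_ref_name_caching never reaches Gitaly. The usual pitfall with cache[key] ||= compute is that nil is recomputed every time; a small sketch of caching misses explicitly (names illustrative, not the client code):

    # Sketch of negative caching: distinguish "not cached yet" from
    # "cached as nil" so a miss is only computed once.
    class CommitCache
      def initialize
        @cache = {}
      end

      def fetch(key)
        return @cache[key] if @cache.key?(key) # nil entries count as hits

        @cache[key] = yield
      end
    end

    cache = CommitCache.new
    calls = 0
    2.times { cache.fetch('master') { calls += 1; nil } }
    calls # => 1
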
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
index aec9c4baf0a..d60d1b7559a 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
@@ -7,35 +7,39 @@ require 'spec_helper'
describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
def type(type_authorizations = [])
Class.new(Types::BaseObject) do
- graphql_name "TestType"
+ graphql_name 'TestType'
authorize type_authorizations
end
end
- def type_with_field(field_type, field_authorizations = [], resolved_value = "Resolved value")
+ def type_with_field(field_type, field_authorizations = [], resolved_value = 'Resolved value', **options)
Class.new(Types::BaseObject) do
- graphql_name "TestTypeWithField"
- field :test_field, field_type, null: true, authorize: field_authorizations, resolve: -> (_, _, _) { resolved_value}
+ graphql_name 'TestTypeWithField'
+ options.reverse_merge!(null: true)
+ field :test_field, field_type,
+ authorize: field_authorizations,
+ resolve: -> (_, _, _) { resolved_value },
+ **options
end
end
let(:current_user) { double(:current_user) }
subject(:service) { described_class.new(field) }
- describe "#authorized_resolve" do
- let(:presented_object) { double("presented object") }
- let(:presented_type) { double("parent type", object: presented_object) }
+ describe '#authorized_resolve' do
+ let(:presented_object) { double('presented object') }
+ let(:presented_type) { double('parent type', object: presented_object) }
subject(:resolved) { service.authorized_resolve.call(presented_type, {}, { current_user: current_user }) }
- context "scalar types" do
- shared_examples "checking permissions on the presented object" do
- it "checks the abilities on the object being presented and returns the value" do
+ context 'scalar types' do
+ shared_examples 'checking permissions on the presented object' do
+ it 'checks the abilities on the object being presented and returns the value' do
expected_permissions.each do |permission|
spy_ability_check_for(permission, presented_object, passed: true)
end
- expect(resolved).to eq("Resolved value")
+ expect(resolved).to eq('Resolved value')
end
it "returns nil if the value wasn't authorized" do
@@ -45,61 +49,71 @@ describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
end
end
- context "when the field is a built-in scalar type" do
- let(:field) { type_with_field(GraphQL::STRING_TYPE, :read_field).fields["testField"].to_graphql }
+ context 'when the field is a built-in scalar type' do
+ let(:field) { type_with_field(GraphQL::STRING_TYPE, :read_field).fields['testField'].to_graphql }
let(:expected_permissions) { [:read_field] }
- it_behaves_like "checking permissions on the presented object"
+ it_behaves_like 'checking permissions on the presented object'
end
- context "when the field is a list of scalar types" do
- let(:field) { type_with_field([GraphQL::STRING_TYPE], :read_field).fields["testField"].to_graphql }
+ context 'when the field is a list of scalar types' do
+ let(:field) { type_with_field([GraphQL::STRING_TYPE], :read_field).fields['testField'].to_graphql }
let(:expected_permissions) { [:read_field] }
- it_behaves_like "checking permissions on the presented object"
+ it_behaves_like 'checking permissions on the presented object'
end
- context "when the field is sub-classed scalar type" do
- let(:field) { type_with_field(Types::TimeType, :read_field).fields["testField"].to_graphql }
+ context 'when the field is sub-classed scalar type' do
+ let(:field) { type_with_field(Types::TimeType, :read_field).fields['testField'].to_graphql }
let(:expected_permissions) { [:read_field] }
- it_behaves_like "checking permissions on the presented object"
+ it_behaves_like 'checking permissions on the presented object'
end
- context "when the field is a list of sub-classed scalar types" do
- let(:field) { type_with_field([Types::TimeType], :read_field).fields["testField"].to_graphql }
+ context 'when the field is a list of sub-classed scalar types' do
+ let(:field) { type_with_field([Types::TimeType], :read_field).fields['testField'].to_graphql }
let(:expected_permissions) { [:read_field] }
- it_behaves_like "checking permissions on the presented object"
+ it_behaves_like 'checking permissions on the presented object'
end
end
- context "when the field is a specific type" do
+ context 'when the field is a specific type' do
let(:custom_type) { type(:read_type) }
- let(:object_in_field) { double("presented in field") }
- let(:field) { type_with_field(custom_type, :read_field, object_in_field).fields["testField"].to_graphql }
+ let(:object_in_field) { double('presented in field') }
+ let(:field) { type_with_field(custom_type, :read_field, object_in_field).fields['testField'].to_graphql }
- it "checks both field & type permissions" do
+ it 'checks both field & type permissions' do
spy_ability_check_for(:read_field, object_in_field, passed: true)
spy_ability_check_for(:read_type, object_in_field, passed: true)
expect(resolved).to eq(object_in_field)
end
- it "returns nil if viewing was not allowed" do
+ it 'returns nil if viewing was not allowed' do
spy_ability_check_for(:read_field, object_in_field, passed: false)
spy_ability_check_for(:read_type, object_in_field, passed: true)
expect(resolved).to be_nil
end
- context "when the field is a list" do
- let(:object_1) { double("presented in field 1") }
- let(:object_2) { double("presented in field 2") }
+ context 'when the field is not nullable' do
+ let(:field) { type_with_field(custom_type, [], object_in_field, null: false).fields['testField'].to_graphql }
+
+ it 'returns nil when viewing is not allowed' do
+ spy_ability_check_for(:read_type, object_in_field, passed: false)
+
+ expect(resolved).to be_nil
+ end
+ end
+
+ context 'when the field is a list' do
+ let(:object_1) { double('presented in field 1') }
+ let(:object_2) { double('presented in field 2') }
let(:presented_types) { [double(object: object_1), double(object: object_2)] }
- let(:field) { type_with_field([custom_type], :read_field, presented_types).fields["testField"].to_graphql }
+ let(:field) { type_with_field([custom_type], :read_field, presented_types).fields['testField'].to_graphql }
- it "checks all permissions" do
+ it 'checks all permissions' do
allow(Ability).to receive(:allowed?) { true }
spy_ability_check_for(:read_field, object_1, passed: true)
@@ -110,7 +124,7 @@ describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
expect(resolved).to eq(presented_types)
end
- it "filters out objects that the user cannot see" do
+ it 'filters out objects that the user cannot see' do
allow(Ability).to receive(:allowed?) { true }
spy_ability_check_for(:read_type, object_1, passed: false)
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
index 13cf52fd795..50138d272c4 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
@@ -34,12 +34,6 @@ describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
end
- describe '#authorized_find' do
- it 'returns the object' do
- expect(loading_resource.authorized_find).to eq(project)
- end
- end
-
describe '#authorized_find!' do
it 'returns the object' do
expect(loading_resource.authorized_find!).to eq(project)
@@ -66,12 +60,6 @@ describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
end
- describe '#authorized_find' do
- it 'returns `nil`' do
- expect(loading_resource.authorized_find).to be_nil
- end
- end
-
describe '#authorized_find!' do
it 'raises an error' do
expect { loading_resource.authorize!(project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
@@ -79,7 +67,7 @@ describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
describe '#authorize!' do
- it 'does not raise an error' do
+ it 'raises an error' do
expect { loading_resource.authorize!(project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
@@ -101,6 +89,45 @@ describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
end
+ context 'when the class does not define authorize' do
+ let(:fake_class) do
+ Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ attr_reader :user, :found_object
+
+ def initialize(user, found_object)
+ @user, @found_object = user, found_object
+ end
+
+ def find_object(*_args)
+ found_object
+ end
+
+ def current_user
+ user
+ end
+
+ def self.name
+ 'TestClass'
+ end
+ end
+ end
+ let(:error) { /#{fake_class.name} has no authorizations/ }
+
+ describe '#authorized_find!' do
+ it 'raises an error with a comprehensive message' do
+ expect { loading_resource.authorized_find! }.to raise_error(error)
+ end
+ end
+
+ describe '#authorized?' do
+ it 'raises an error with a comprehensive message' do
+ expect { loading_resource.authorized?(project) }.to raise_error(error)
+ end
+ end
+ end
+
describe '#authorize' do
it 'adds permissions from subclasses to those of superclasses when used on classes' do
base_class = Class.new do
diff --git a/spec/lib/gitlab/graphql/copy_field_description_spec.rb b/spec/lib/gitlab/graphql/copy_field_description_spec.rb
new file mode 100644
index 00000000000..e7462c5b954
--- /dev/null
+++ b/spec/lib/gitlab/graphql/copy_field_description_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::CopyFieldDescription do
+ subject { Class.new.include(described_class) }
+
+ describe '.copy_field_description' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name "TestType"
+
+ field :field_name, GraphQL::STRING_TYPE, null: true, description: 'Foo'
+ end
+ end
+
+ it 'returns the correct description' do
+ expect(subject.copy_field_description(type, :field_name)).to eq('Foo')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb b/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
new file mode 100644
index 00000000000..91e90315b3e
--- /dev/null
+++ b/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Graphql::FindArgumentInParent do
+ describe '#find' do
+ def build_node(parent = nil, args: {})
+ props = { irep_node: double(arguments: args) }
+ props[:parent] = parent if parent # The root node shouldn't respond to parent
+
+ double(props)
+ end
+
+ let(:parent) do
+ build_node(
+ build_node(
+ build_node(
+ build_node,
+ args: { myArg: 1 }
+ )
+ )
+ )
+ end
+ let(:arg_name) { :my_arg }
+
+ it 'searches parents and returns the argument' do
+ expect(described_class.find(parent, :my_arg)).to eq(1)
+ end
+
+ it 'finds the argument whether it is passed as a Ruby or GraphQL-formatted symbol or string' do
+ [:my_arg, :myArg, 'my_arg', 'myArg'].each do |arg|
+ expect(described_class.find(parent, arg)).to eq(1)
+ end
+ end
+
+ it 'returns nil if the argument is not found in any parent' do
+ expect(described_class.find(parent, :bar)).to eq(nil)
+ end
+
+ it 'can limit the depth it searches to' do
+ expect(described_class.find(parent, :my_arg, limit_depth: 1)).to eq(nil)
+ end
+ end
+end
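
Judging from the doubles this spec builds, the finder climbs a node's parent chain, checks each level's irep_node arguments under both the Ruby (my_arg) and GraphQL (myArg) spellings, and gives up after limit_depth levels or at the root, which does not respond to parent. A standalone sketch of that walk against plain structs rather than graphql-ruby internals (not the actual class):

    # Sketch of walking up a chain of nodes looking for an argument,
    # accepting snake_case or camelCase, with an optional depth limit.
    Node = Struct.new(:parent, :arguments, keyword_init: true)

    def find_argument_in_parent(node, name, limit_depth: nil)
      depth = 0
      name = name.to_s
      candidates = [name, name.gsub(/_([a-z])/) { Regexp.last_match(1).upcase }]

      while node
        args = node.arguments
        candidates.each do |key|
          return args[key.to_sym] if args.key?(key.to_sym)
          return args[key] if args.key?(key)
        end

        depth += 1
        break if limit_depth && depth >= limit_depth

        node = node.parent
      end

      nil
    end

    root   = Node.new(arguments: {})
    middle = Node.new(parent: root, arguments: { myArg: 1 })
    leaf   = Node.new(parent: Node.new(parent: middle, arguments: {}), arguments: {})

    find_argument_in_parent(leaf, :my_arg)                 # => 1
    find_argument_in_parent(leaf, :my_arg, limit_depth: 1) # => nil
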
diff --git a/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb
new file mode 100644
index 00000000000..927476cc655
--- /dev/null
+++ b/spec/lib/gitlab/graphql/loaders/pipeline_for_sha_loader_spec.rb
@@ -0,0 +1,20 @@
+require 'spec_helper'
+
+describe Gitlab::Graphql::Loaders::PipelineForShaLoader do
+ include GraphqlHelpers
+
+ describe '#find_last' do
+ it 'batch-resolves latest pipeline' do
+ project = create(:project, :repository)
+ pipeline1 = create(:ci_pipeline, project: project, ref: project.default_branch, sha: project.commit.sha)
+ pipeline2 = create(:ci_pipeline, project: project, ref: project.default_branch, sha: project.commit.sha)
+ pipeline3 = create(:ci_pipeline, project: project, ref: 'improve/awesome', sha: project.commit('improve/awesome').sha)
+
+ result = batch(max_queries: 1) do
+ [pipeline1.sha, pipeline3.sha].map { |sha| described_class.new(project, sha).find_last }
+ end
+
+ expect(result).to contain_exactly(pipeline2, pipeline3)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 7a250603b6b..7baa52ffb4f 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -397,6 +397,7 @@ project:
- incident_management_setting
- merge_trains
- designs
+- project_aliases
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json
index 6512fe80a3b..8be074f4b9b 100644
--- a/spec/lib/gitlab/import_export/project.json
+++ b/spec/lib/gitlab/import_export/project.json
@@ -6760,7 +6760,7 @@
},
{
"id": 95,
- "title": "JIRA",
+ "title": "Jira",
"project_id": 5,
"created_at": "2016-06-14T15:01:51.255Z",
"updated_at": "2016-06-14T15:01:51.255Z",
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index a406c25b1d8..28b187c3676 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -123,6 +123,7 @@ Release:
- project_id
- created_at
- updated_at
+- released_at
Releases::Link:
- id
- release_id
@@ -429,6 +430,7 @@ Service:
- confidential_issues_events
- confidential_note_events
- deployment_events
+- description
ProjectHook:
- id
- url
diff --git a/spec/lib/gitlab/issuable_metadata_spec.rb b/spec/lib/gitlab/issuable_metadata_spec.rb
index 916f3876a8e..032467b8b4e 100644
--- a/spec/lib/gitlab/issuable_metadata_spec.rb
+++ b/spec/lib/gitlab/issuable_metadata_spec.rb
@@ -7,11 +7,11 @@ describe Gitlab::IssuableMetadata do
subject { Class.new { include Gitlab::IssuableMetadata }.new }
it 'returns an empty Hash if an empty collection is provided' do
- expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({})
+ expect(subject.issuable_meta_data(Issue.none, 'Issue', user)).to eq({})
end
it 'raises an error when given a collection with no limit' do
- expect { subject.issuable_meta_data(Issue.all, 'Issue') }.to raise_error(/must have a limit/)
+ expect { subject.issuable_meta_data(Issue.all, 'Issue', user) }.to raise_error(/must have a limit/)
end
context 'issues' do
@@ -23,7 +23,7 @@ describe Gitlab::IssuableMetadata do
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do
- data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue')
+ data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue', user)
expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1)
@@ -46,7 +46,7 @@ describe Gitlab::IssuableMetadata do
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do
- data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest')
+ data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest', user)
expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1)
diff --git a/spec/lib/gitlab/issuable_sorter_spec.rb b/spec/lib/gitlab/issuable_sorter_spec.rb
index 642a6cb6caa..5bd76bc6081 100644
--- a/spec/lib/gitlab/issuable_sorter_spec.rb
+++ b/spec/lib/gitlab/issuable_sorter_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::IssuableSorter do
expect(described_class.sort(project1, unsorted)).to eq(sorted)
end
- context 'for JIRA issues' do
+ context 'for Jira issues' do
let(:sorted) do
[ExternalIssue.new('JIRA-1', project1),
ExternalIssue.new('JIRA-2', project1),
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 59160741c45..39cdd42088e 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -129,19 +129,52 @@ describe Gitlab::JsonCache do
.with(expanded_key)
.and_return(nil)
+ expect(ActiveSupport::JSON).not_to receive(:decode)
expect(cache.read(key)).to be_nil
end
- context 'when the cached value is a boolean' do
+ context 'when the cached value is true' do
it 'parses the cached value' do
allow(backend).to receive(:read)
.with(expanded_key)
.and_return(true)
+ expect(ActiveSupport::JSON).to receive(:decode).with("true").and_call_original
expect(cache.read(key, BroadcastMessage)).to eq(true)
end
end
+ context 'when the cached value is false' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return(false)
+
+ expect(ActiveSupport::JSON).to receive(:decode).with("false").and_call_original
+ expect(cache.read(key, BroadcastMessage)).to eq(false)
+ end
+ end
+
+ context 'when the cached value is a JSON true value' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return("true")
+
+ expect(cache.read(key, BroadcastMessage)).to eq(true)
+ end
+ end
+
+ context 'when the cached value is a JSON false value' do
+ it 'parses the cached value' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return("false")
+
+ expect(cache.read(key, BroadcastMessage)).to eq(false)
+ end
+ end
+
context 'when the cached value is a hash' do
it 'parses the cached value' do
allow(backend).to receive(:read)
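
The new contexts pin down how booleans come back out of the cache: a nil read short-circuits without JSON decoding, while true, false and their JSON string forms "true"/"false" are run through ActiveSupport::JSON.decode and returned as booleans. A hedged sketch of that portion of the read path (method name illustrative; the real read also handles hashes and class hydration):

    require 'active_support'
    require 'active_support/json'

    # Sketch of the read path the examples describe: skip decoding on a cache
    # miss, otherwise decode the string form of whatever the backend returned,
    # so true, false, "true" and "false" all come back as booleans.
    def parse_cached_value(raw)
      return if raw.nil? # no decode on a cache miss

      ActiveSupport::JSON.decode(raw.to_s)
    rescue ActiveSupport::JSON.parse_error
      nil
    end

    parse_cached_value(nil)     # => nil (ActiveSupport::JSON.decode never called)
    parse_cached_value(true)    # => true (decoded from "true")
    parse_cached_value("false") # => false
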
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index a0c664da185..9163019514b 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -132,6 +132,7 @@ describe Gitlab::LegacyGithubImport::Importer do
body: 'Release v1.0.0',
draft: false,
created_at: created_at,
+ published_at: created_at,
updated_at: updated_at,
url: "#{api_root}/repos/octocat/Hello-World/releases/1"
)
@@ -144,6 +145,7 @@ describe Gitlab::LegacyGithubImport::Importer do
body: nil,
draft: false,
created_at: created_at,
+ published_at: created_at,
updated_at: updated_at,
url: "#{api_root}/repos/octocat/Hello-World/releases/2"
)
diff --git a/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
index c57b96fb00d..534cf219520 100644
--- a/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::LegacyGithubImport::ReleaseFormatter do
let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:octocat) { double(id: 123456, login: 'octocat') }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
+ let(:published_at) { DateTime.strptime('2011-01-26T20:00:00Z') }
let(:base_data) do
{
@@ -11,7 +12,7 @@ describe Gitlab::LegacyGithubImport::ReleaseFormatter do
name: 'First release',
draft: false,
created_at: created_at,
- published_at: created_at,
+ published_at: published_at,
body: 'Release v1.0.0'
}
end
@@ -28,6 +29,7 @@ describe Gitlab::LegacyGithubImport::ReleaseFormatter do
name: 'First release',
description: 'Release v1.0.0',
created_at: created_at,
+ released_at: published_at,
updated_at: created_at
}
diff --git a/spec/lib/gitlab/lets_encrypt/client_spec.rb b/spec/lib/gitlab/lets_encrypt/client_spec.rb
index 5454d9c1af4..cbb862cb0c9 100644
--- a/spec/lib/gitlab/lets_encrypt/client_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/client_spec.rb
@@ -116,42 +116,6 @@ describe ::Gitlab::LetsEncrypt::Client do
end
end
- describe '#enabled?' do
- subject { client.enabled? }
-
- context 'when terms of service are accepted' do
- it { is_expected.to eq(true) }
-
- context "when private_key isn't present and database is read only" do
- before do
- allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
- end
-
- it 'returns false' do
- expect(::Gitlab::CurrentSettings.lets_encrypt_private_key).to eq(nil)
-
- is_expected.to eq(false)
- end
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(pages_auto_ssl: false)
- end
-
- it { is_expected.to eq(false) }
- end
- end
-
- context 'when terms of service are not accepted' do
- before do
- stub_application_setting(lets_encrypt_terms_of_service_accepted: false)
- end
-
- it { is_expected.to eq(false) }
- end
- end
-
describe '#terms_of_service_url' do
subject { client.terms_of_service_url }
diff --git a/spec/lib/gitlab/lets_encrypt_spec.rb b/spec/lib/gitlab/lets_encrypt_spec.rb
new file mode 100644
index 00000000000..674b114e9d3
--- /dev/null
+++ b/spec/lib/gitlab/lets_encrypt_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::Gitlab::LetsEncrypt do
+ include LetsEncryptHelpers
+
+ before do
+ stub_lets_encrypt_settings
+ end
+
+ describe '.enabled?' do
+ let(:project) { create(:project) }
+ let(:pages_domain) { create(:pages_domain, project: project) }
+
+ subject { described_class.enabled?(pages_domain) }
+
+ context 'when terms of service are accepted' do
+ it { is_expected.to eq(true) }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(pages_auto_ssl: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when terms of service are not accepted' do
+ before do
+ stub_application_setting(lets_encrypt_terms_of_service_accepted: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when feature flag for project is disabled' do
+ before do
+ stub_feature_flags(pages_auto_ssl_for_project: false)
+ end
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when domain has no project' do
+ let(:pages_domain) { create(:pages_domain) }
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb b/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb
index eecd257b38d..79a78df44ae 100644
--- a/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/dynamic_dashboard_service_spec.rb
@@ -6,13 +6,19 @@ describe Gitlab::Metrics::Dashboard::DynamicDashboardService, :use_clean_rails_m
include MetricsDashboardHelpers
set(:project) { build(:project) }
+ set(:user) { create(:user) }
set(:environment) { create(:environment, project: project) }
+ before do
+ project.add_maintainer(user)
+ end
+
describe '#get_dashboard' do
- let(:service_params) { [project, nil, { environment: environment, dashboard_path: nil }] }
+ let(:service_params) { [project, user, { environment: environment, dashboard_path: nil }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
it_behaves_like 'valid embedded dashboard service response'
+ it_behaves_like 'raises error for users with insufficient permissions'
it 'caches the unprocessed dashboard for subsequent calls' do
expect(YAML).to receive(:safe_load).once.and_call_original
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index b9a5ee9c2b3..d8ed54c0248 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -6,12 +6,17 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
include MetricsDashboardHelpers
set(:project) { build(:project) }
+ set(:user) { create(:user) }
set(:environment) { create(:environment, project: project) }
let(:system_dashboard_path) { Gitlab::Metrics::Dashboard::SystemDashboardService::SYSTEM_DASHBOARD_PATH}
+ before do
+ project.add_maintainer(user)
+ end
+
describe '.find' do
let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:service_call) { described_class.find(project, nil, environment, dashboard_path: dashboard_path) }
+ let(:service_call) { described_class.find(project, user, environment, dashboard_path: dashboard_path) }
it_behaves_like 'misconfigured dashboard service response', :not_found
@@ -41,13 +46,13 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
end
context 'when no dashboard is specified' do
- let(:service_call) { described_class.find(project, nil, environment) }
+ let(:service_call) { described_class.find(project, user, environment) }
it_behaves_like 'valid dashboard service response'
end
context 'when the dashboard is expected to be embedded' do
- let(:service_call) { described_class.find(project, nil, environment, dashboard_path: nil, embedded: true) }
+ let(:service_call) { described_class.find(project, user, environment, dashboard_path: nil, embedded: true) }
it_behaves_like 'valid embedded dashboard service response'
end
diff --git a/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb b/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb
index 57d82421b5d..468e8ec9ef2 100644
--- a/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/project_dashboard_service_spec.rb
@@ -5,8 +5,8 @@ require 'rails_helper'
describe Gitlab::Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { build(:user) }
- set(:project) { build(:project) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
set(:environment) { create(:environment, project: project) }
before do
@@ -22,6 +22,8 @@ describe Gitlab::Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_m
it_behaves_like 'misconfigured dashboard service response', :not_found
end
+ it_behaves_like 'raises error for users with insufficient permissions'
+
context 'when the dashboard exists' do
let(:project) { project_with_dashboard(dashboard_path) }
diff --git a/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb b/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb
index 2af745bd4d7..13f22dd01c5 100644
--- a/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/system_dashboard_service_spec.rb
@@ -5,15 +5,21 @@ require 'spec_helper'
describe Gitlab::Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project) }
set(:environment) { create(:environment, project: project) }
+ before do
+ project.add_maintainer(user)
+ end
+
describe 'get_dashboard' do
let(:dashboard_path) { described_class::SYSTEM_DASHBOARD_PATH }
- let(:service_params) { [project, nil, { environment: environment, dashboard_path: dashboard_path }] }
+ let(:service_params) { [project, user, { environment: environment, dashboard_path: dashboard_path }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
it_behaves_like 'valid dashboard service response'
+ it_behaves_like 'raises error for users with insufficient permissions'
it 'caches the unprocessed dashboard for subsequent calls' do
expect(YAML).to receive(:safe_load).once.and_call_original
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
index b0603d96eb2..da87df15746 100644
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ b/spec/lib/gitlab/metrics/system_spec.rb
@@ -52,13 +52,13 @@ describe Gitlab::Metrics::System do
end
describe '.cpu_time' do
- it 'returns a Fixnum' do
+ it 'returns a Float' do
expect(described_class.cpu_time).to be_an(Float)
end
end
describe '.real_time' do
- it 'returns a Fixnum' do
+ it 'returns a Float' do
expect(described_class.real_time).to be_an(Float)
end
end
diff --git a/spec/lib/gitlab/performance_bar_spec.rb b/spec/lib/gitlab/performance_bar_spec.rb
index f480376acb4..ee3c571c9c0 100644
--- a/spec/lib/gitlab/performance_bar_spec.rb
+++ b/spec/lib/gitlab/performance_bar_spec.rb
@@ -3,17 +3,42 @@ require 'spec_helper'
describe Gitlab::PerformanceBar do
shared_examples 'allowed user IDs are cached' do
before do
- # Warm the Redis cache
+ # Warm the caches
described_class.enabled?(user)
end
it 'caches the allowed user IDs in cache', :use_clean_rails_memory_store_caching do
expect do
+ expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
+ expect(described_class.l2_cache_backend).not_to receive(:fetch)
expect(described_class.enabled?(user)).to be_truthy
end.not_to exceed_query_limit(0)
end
+
+ it 'caches the allowed user IDs in L1 cache for 1 minute', :use_clean_rails_memory_store_caching do
+ Timecop.travel 2.minutes do
+ expect do
+ expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
+ expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
+ expect(described_class.enabled?(user)).to be_truthy
+ end.not_to exceed_query_limit(0)
+ end
+ end
+
+ it 'caches the allowed user IDs in L2 cache for 5 minutes', :use_clean_rails_memory_store_caching do
+ Timecop.travel 6.minutes do
+ expect do
+ expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
+ expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
+ expect(described_class.enabled?(user)).to be_truthy
+ end.not_to exceed_query_limit(2)
+ end
+ end
end
+ it { expect(described_class.l1_cache_backend).to eq(Gitlab::ThreadMemoryCache.cache_backend) }
+ it { expect(described_class.l2_cache_backend).to eq(Rails.cache) }
+
describe '.enabled?' do
let(:user) { create(:user) }
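
The added expectations describe a layered cache for the allowed user IDs: an L1 thread-memory backend (Gitlab::ThreadMemoryCache.cache_backend) answers for roughly a minute without touching L2, and an L2 entry in Rails.cache lasts roughly five minutes before the underlying query runs again. A sketch of how such a two-level fetch can be composed (expiry values taken from the example names; the key and the composition itself are assumptions, not the GitLab code):

    require 'active_support/all'

    # Sketch of a two-level cache fetch: a short-lived in-process L1 in front
    # of a longer-lived shared L2, with the expensive lookup only running when
    # both layers have expired.
    L1_CACHE = ActiveSupport::Cache::MemoryStore.new # stands in for the thread-memory cache
    L2_CACHE = ActiveSupport::Cache::MemoryStore.new # stands in for Rails.cache

    def allowed_user_ids
      L1_CACHE.fetch('performance_bar_allowed_user_ids', expires_in: 1.minute) do
        L2_CACHE.fetch('performance_bar_allowed_user_ids:v2', expires_in: 5.minutes) do
          expensive_lookup # e.g. members of the allowed group; hits the database
        end
      end
    end

    def expensive_lookup
      [1, 2, 3]
    end
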
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index d982053d92e..7513dbeeb6f 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -197,14 +197,14 @@ describe Gitlab::ReferenceExtractor do
let(:issue) { create(:issue, project: project) }
context 'when GitLab issues are enabled' do
- it 'returns both JIRA and internal issues' do
+ it 'returns both Jira and internal issues' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #{issue.to_reference}")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project),
issue]
end
- it 'returns only JIRA issues if the internal one does not exists' do
+ it 'returns only Jira issues if the internal one does not exist' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #999")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project)]
@@ -217,7 +217,7 @@ describe Gitlab::ReferenceExtractor do
project.save!
end
- it 'returns only JIRA issues' do
+ it 'returns only Jira issues' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #{issue.to_reference}")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project)]
diff --git a/spec/lib/gitlab/time_tracking_formatter_spec.rb b/spec/lib/gitlab/time_tracking_formatter_spec.rb
new file mode 100644
index 00000000000..a85d418777f
--- /dev/null
+++ b/spec/lib/gitlab/time_tracking_formatter_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::TimeTrackingFormatter do
+ describe '#parse' do
+ subject { described_class.parse(duration_string) }
+
+ context 'positive durations' do
+ let(:duration_string) { '3h 20m' }
+
+ it { expect(subject).to eq(12_000) }
+ end
+
+ context 'negative durations' do
+ let(:duration_string) { '-3h 20m' }
+
+ it { expect(subject).to eq(-12_000) }
+ end
+ end
+
+ describe '#output' do
+ let(:num_seconds) { 178_800 }
+
+ subject { described_class.output(num_seconds) }
+
+ context 'time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it { expect(subject).to eq('49h 40m') }
+ end
+
+ context 'time_tracking_limit_to_hours setting is false' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: false)
+ end
+
+ it { expect(subject).to eq('1w 1d 1h 40m') }
+ end
+ end
+end
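
The expected values follow from the conventional time-tracking units: '3h 20m' parses to 3*3600 + 20*60 = 12,000 seconds, and 178,800 seconds formats as '49h 40m' when limited to hours (49*3600 + 40*60) or as '1w 1d 1h 40m' assuming an 8-hour day and 5-day week (144,000 + 28,800 + 3,600 + 2,400). The arithmetic, checked independently of the formatter:

    # Arithmetic behind the expected values, assuming an 8-hour day and
    # 5-day week (the conventional time-tracking units).
    MINUTE = 60
    HOUR   = 60 * MINUTE
    DAY    = 8 * HOUR
    WEEK   = 5 * DAY

    3 * HOUR + 20 * MINUTE                      # => 12_000  ('3h 20m')
    49 * HOUR + 40 * MINUTE                     # => 178_800 ('49h 40m', hours only)
    1 * WEEK + 1 * DAY + 1 * HOUR + 40 * MINUTE # => 178_800 ('1w 1d 1h 40m')
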
diff --git a/spec/lib/gitlab/utils/deep_size_spec.rb b/spec/lib/gitlab/utils/deep_size_spec.rb
new file mode 100644
index 00000000000..1e619a15980
--- /dev/null
+++ b/spec/lib/gitlab/utils/deep_size_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe Gitlab::Utils::DeepSize do
+ let(:data) do
+ {
+ a: [1, 2, 3],
+ b: {
+ c: [4, 5],
+ d: [
+ { e: [[6], [7]] }
+ ]
+ }
+ }
+ end
+
+ let(:max_size) { 1.kilobyte }
+ let(:max_depth) { 10 }
+ let(:deep_size) { described_class.new(data, max_size: max_size, max_depth: max_depth) }
+
+ describe '#evaluate' do
+ context 'when data is within size and depth limits' do
+ it 'returns true' do
+ expect(deep_size).to be_valid
+ end
+ end
+
+ context 'when data is not within the size limit' do
+ let(:max_size) { 200.bytes }
+
+ it 'returns false' do
+ expect(deep_size).not_to be_valid
+ end
+ end
+
+ context 'when data is not within the depth limit' do
+ let(:max_depth) { 2 }
+
+ it 'returns false' do
+ expect(deep_size).not_to be_valid
+ end
+ end
+ end
+end
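
Gitlab::Utils::DeepSize is exercised here purely as a validity check against size and depth budgets. As a rough standalone illustration (not the class's actual algorithm), the same idea can be sketched with ObjectSpace.memsize_of and a recursive walk:

    require 'objspace'

    # Rough sketch of a deep size/depth check: sum the shallow memsize of every
    # nested object and track nesting depth, declaring the data invalid once
    # either budget is exceeded.
    def deep_valid?(data, max_size: 1024, max_depth: 10)
      total = 0
      walk = lambda do |node, depth|
        return false if depth > max_depth

        total += ObjectSpace.memsize_of(node)
        return false if total > max_size

        children =
          case node
          when Hash  then node.keys + node.values
          when Array then node
          else []
          end

        children.all? { |child| walk.call(child, depth + 1) }
      end

      walk.call(data, 1)
    end

    deep_valid?({ a: [1, 2, 3] })               # => true for small, shallow data
    deep_valid?({ a: [1, 2, 3] }, max_depth: 1) # => false once nesting exceeds the limit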