author     GitLab Bot <gitlab-bot@gitlab.com>  2020-03-17 18:09:44 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-03-17 18:09:44 +0000
commit     2c156e3c7bbade01c36eee18327f1ced6eebea79 (patch)
tree       115fa8dbf6bc05037378b380311d31acb805f54c /spec
parent     8e129497b2565b8c595ef4f806d9a9595ca654e5 (diff)
download   gitlab-ce-2c156e3c7bbade01c36eee18327f1ced6eebea79.tar.gz
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb | 130
-rw-r--r--  spec/factories/ci/builds.rb | 16
-rw-r--r--  spec/factories/ci/job_artifacts.rb | 30
-rw-r--r--  spec/factories/ci/pipelines.rb | 8
-rw-r--r--  spec/factories/merge_requests.rb | 12
-rw-r--r--  spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb | 2
-rw-r--r--  spec/fixtures/api/schemas/entities/user.json | 3
-rw-r--r--  spec/fixtures/cobertura/coverage.xml | 43
-rw-r--r--  spec/fixtures/cobertura/coverage.xml.gz | bin 0 -> 576 bytes
-rw-r--r--  spec/fixtures/cobertura/coverage_gocov_xml.xml | 216
-rw-r--r--  spec/fixtures/cobertura/coverage_gocov_xml.xml.gz | bin 0 -> 1103 bytes
-rw-r--r--  spec/fixtures/cobertura/coverage_with_corrupted_data.xml | 50
-rw-r--r--  spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz | bin 0 -> 571 bytes
-rw-r--r--  spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js | 120
-rw-r--r--  spec/frontend/create_cluster/eks_cluster/store/getters_spec.js | 13
-rw-r--r--  spec/frontend/diffs/components/app_spec.js | 5
-rw-r--r--  spec/javascripts/diffs/components/inline_diff_table_row_spec.js | 61
-rw-r--r--  spec/javascripts/diffs/components/parallel_diff_table_row_spec.js | 62
-rw-r--r--  spec/javascripts/diffs/store/actions_spec.js | 43
-rw-r--r--  spec/javascripts/diffs/store/getters_spec.js | 30
-rw-r--r--  spec/javascripts/diffs/store/mutations_spec.js | 11
-rw-r--r--  spec/lib/gitlab/ci/config/entry/reports_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb | 176
-rw-r--r--  spec/lib/gitlab/ci/parsers_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/ci/reports/coverage_reports_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/import_export/group/tree_saver_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/json/legacy_writer_spec.rb | 79
-rw-r--r--  spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb (renamed from spec/lib/gitlab/import_export/relation_tree_saver_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb | 397
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_saver_spec.rb | 53
-rw-r--r--  spec/models/ci/build_spec.rb | 47
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 16
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 53
-rw-r--r--  spec/models/merge_request_spec.rb | 82
-rw-r--r--  spec/models/user_spec.rb | 23
-rw-r--r--  spec/presenters/projects/import_export/project_export_presenter_spec.rb | 92
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 3
-rw-r--r--  spec/services/projects/import_export/export_service_spec.rb | 24
-rw-r--r--  spec/support/shared_examples/requests/api/discussions_shared_examples.rb | 56
39 files changed, 1929 insertions(+), 108 deletions(-)
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 3684571ff9c..2b1890f6cbd 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -984,6 +984,136 @@ describe Projects::MergeRequestsController do
end
end
+ describe 'GET coverage_reports' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ source_project: project)
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :success,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:find_coverage_reports)
+ .and_return(report)
+
+ allow_any_instance_of(MergeRequest)
+ .to receive(:actual_head_pipeline)
+ .and_return(pipeline)
+ end
+
+ subject do
+ get :coverage_reports, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ },
+ format: :json
+ end
+
+ describe 'permissions on a public project with private CI/CD' do
+ let(:project) { create :project, :repository, :public, :builds_private }
+ let(:report) { { status: :parsed, data: [] } }
+
+ context 'while signed out' do
+ before do
+ sign_out(user)
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+
+ context 'while signed in as an unrelated user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+ end
+
+ context 'when pipeline has jobs with coverage reports' do
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:has_coverage_reports?)
+ .and_return(true)
+ end
+
+ context 'when processing coverage reports is in progress' do
+ let(:report) { { status: :parsing } }
+
+ it 'sends polling interval' do
+ expect(Gitlab::PollingInterval).to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 204 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when processing coverage reports is completed' do
+ let(:report) { { status: :parsed, data: pipeline.coverage_reports } }
+
+ it 'returns coverage reports' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'files' => {} })
+ end
+ end
+
+ context 'when user created corrupted coverage reports' do
+ let(:report) { { status: :error, status_reason: 'Failed to parse coverage reports' } }
+
+ it 'does not send polling interval' do
+ expect(Gitlab::PollingInterval).not_to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 400 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'status_reason' => 'Failed to parse coverage reports' })
+ end
+ end
+ end
+
+ context 'when pipeline does not have jobs with coverage reports' do
+ let(:report) { double }
+
+ it 'returns no content' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+ end
+
describe 'GET test_reports' do
let(:merge_request) do
create(:merge_request,
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index b6f18240b9e..446c1c59030 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -311,6 +311,12 @@ FactoryBot.define do
end
end
+ trait :coverage_reports do
+ after(:build) do |build|
+ build.job_artifacts << create(:ci_job_artifact, :cobertura, job: build)
+ end
+ end
+
trait :expired do
artifacts_expire_at { 1.minute.ago }
end
@@ -355,6 +361,8 @@ FactoryBot.define do
options { {} }
end
+ # TODO: move Security traits to ee_ci_build
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/210486
trait :dast do
options do
{
@@ -395,6 +403,14 @@ FactoryBot.define do
end
end
+ trait :license_scanning do
+ options do
+ {
+ artifacts: { reports: { license_management: 'gl-license-scanning-report.json' } }
+ }
+ end
+ end
+
trait :non_playable do
status { 'created' }
self.when { 'manual' }
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index e0942bf0ac3..8fbf242a607 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -129,6 +129,36 @@ FactoryBot.define do
end
end
+ trait :cobertura do
+ file_type { :cobertura }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/cobertura/coverage.xml.gz'), 'application/x-gzip')
+ end
+ end
+
+ trait :coverage_gocov_xml do
+ file_type { :cobertura }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/cobertura/coverage_gocov_xml.xml.gz'), 'application/x-gzip')
+ end
+ end
+
+ trait :coverage_with_corrupted_data do
+ file_type { :cobertura }
+ file_format { :gzip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz'), 'application/x-gzip')
+ end
+ end
+
trait :codequality do
file_type { :codequality }
file_format { :raw }
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 40b2aa3042e..11686ed5277 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -67,6 +67,14 @@ FactoryBot.define do
end
end
+ trait :with_coverage_reports do
+ status { :success }
+
+ after(:build) do |pipeline, evaluator|
+ pipeline.builds << build(:ci_build, :coverage_reports, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
trait :with_exposed_artifacts do
status { :success }
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 2344ffffa65..f717bab5f2a 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -121,6 +121,18 @@ FactoryBot.define do
end
end
+ trait :with_coverage_reports do
+ after(:build) do |merge_request|
+ merge_request.head_pipeline = build(
+ :ci_pipeline,
+ :success,
+ :with_coverage_reports,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+ end
+
trait :with_exposed_artifacts do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index c482d783bab..21599164ac3 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -190,7 +190,7 @@ describe 'Merge request > User sees avatars on diff notes', :js do
def find_line(line_code)
line = find("[id='#{line_code}']")
- line = line.find(:xpath, 'preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
+ line = line.find(:xpath, 'preceding-sibling::*[1][self::td]/preceding-sibling::*[1][self::td]') if line.tag_name == 'td'
line
end
end
diff --git a/spec/fixtures/api/schemas/entities/user.json b/spec/fixtures/api/schemas/entities/user.json
index 82d80b75cef..1e0c8885609 100644
--- a/spec/fixtures/api/schemas/entities/user.json
+++ b/spec/fixtures/api/schemas/entities/user.json
@@ -17,7 +17,8 @@
"path": { "type": "string" },
"name": { "type": "string" },
"username": { "type": "string" },
- "status_tooltip_html": { "$ref": "../types/nullable_string.json" }
+ "status_tooltip_html": { "$ref": "../types/nullable_string.json" },
+ "is_gitlab_employee": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/cobertura/coverage.xml b/spec/fixtures/cobertura/coverage.xml
new file mode 100644
index 00000000000..01e8085b8d8
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage.xml
@@ -0,0 +1,43 @@
+<?xml version='1.0'?>
+<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
+<!-- cobertura example file - generated by simplecov-cobertura - subset of gitlab-org/gitlab - manually modified -->
+<!-- Generated by simplecov-cobertura version 1.3.1 (https://github.com/dashingrocket/simplecov-cobertura) -->
+<coverage line-rate="0.5" branch-rate="0" lines-covered="73865" lines-valid="147397" branches-covered="0" branches-valid="0" complexity="0" version="0" timestamp="1577128350">
+ <sources>
+ <source>/tmp/projects/gitlab-ce/gitlab</source>
+ </sources>
+ <packages>
+ <package name="Controllers" line-rate="0.43" branch-rate="0" complexity="0">
+ <classes>
+ <class name="abuse_reports_controller" filename="app/controllers/abuse_reports_controller.rb" line-rate="0.3" branch-rate="0" complexity="0">
+ <methods/>
+ <lines>
+ <line number="3" branch="false" hits="1"/>
+ <line number="4" branch="false" hits="1"/>
+ <line number="6" branch="false" hits="1"/>
+ <line number="7" branch="false" hits="0"/>
+ <line number="8" branch="false" hits="0"/>
+ <line number="9" branch="false" hits="0"/>
+ <line number="12" branch="false" hits="1"/>
+ <line number="13" branch="false" hits="0"/>
+ <line number="14" branch="false" hits="0"/>
+ <line number="16" branch="false" hits="0"/>
+ <line number="17" branch="false" hits="0"/>
+ <line number="19" branch="false" hits="0"/>
+ <line number="20" branch="false" hits="0"/>
+ <line number="22" branch="false" hits="0"/>
+ <line number="26" branch="false" hits="1"/>
+ <line number="28" branch="false" hits="1"/>
+ <line number="29" branch="false" hits="0"/>
+ <line number="36" branch="false" hits="1"/>
+ <line number="37" branch="false" hits="0"/>
+ <line number="39" branch="false" hits="0"/>
+ <line number="40" branch="false" hits="0"/>
+ <line number="41" branch="false" hits="0"/>
+ <line number="42" branch="false" hits="0"/>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ </packages>
+</coverage>
\ No newline at end of file
diff --git a/spec/fixtures/cobertura/coverage.xml.gz b/spec/fixtures/cobertura/coverage.xml.gz
new file mode 100644
index 00000000000..1a5a5f02ced
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage.xml.gz
Binary files differ
diff --git a/spec/fixtures/cobertura/coverage_gocov_xml.xml b/spec/fixtures/cobertura/coverage_gocov_xml.xml
new file mode 100644
index 00000000000..c4da14efb40
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_gocov_xml.xml
@@ -0,0 +1,216 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
+<!-- cobertura example file - generated by gocov-xml - subset of gitlab-org/gitaly -->
+<coverage line-rate="0.7966102" branch-rate="0" lines-covered="47" lines-valid="59" branches-covered="0" branches-valid="0" complexity="0" version="" timestamp="1577127162320">
+ <packages>
+ <package name="gitlab.com/gitlab-org/gitaly/auth" line-rate="0.7966102" branch-rate="0" complexity="0" line-count="59" line-hits="47">
+ <classes>
+ <class name="-" filename="auth/rpccredentials.go" line-rate="0.2" branch-rate="0" complexity="0" line-count="5" line-hits="1">
+ <methods>
+ <method name="RPCCredentials" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="17" hits="1"></line>
+ </lines>
+ </method>
+ <method name="RPCCredentialsV2" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="34" hits="0"></line>
+ </lines>
+ </method>
+ <method name="hmacToken" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="52" hits="0"></line>
+ <line number="53" hits="0"></line>
+ <line number="55" hits="0"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="17" hits="1"></line>
+ <line number="34" hits="0"></line>
+ <line number="52" hits="0"></line>
+ <line number="53" hits="0"></line>
+ <line number="55" hits="0"></line>
+ </lines>
+ </class>
+ <class name="rpcCredentials" filename="auth/rpccredentials.go" line-rate="0.5" branch-rate="0" complexity="0" line-count="2" line-hits="1">
+ <methods>
+ <method name="RequireTransportSecurity" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="24" hits="0"></line>
+ </lines>
+ </method>
+ <method name="GetRequestMetadata" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="27" hits="1"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="24" hits="0"></line>
+ <line number="27" hits="1"></line>
+ </lines>
+ </class>
+ <class name="rpcCredentialsV2" filename="auth/rpccredentials.go" line-rate="0" branch-rate="0" complexity="0" line-count="3" line-hits="0">
+ <methods>
+ <method name="RequireTransportSecurity" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="41" hits="0"></line>
+ </lines>
+ </method>
+ <method name="GetRequestMetadata" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="44" hits="0"></line>
+ </lines>
+ </method>
+ <method name="hmacToken" signature="" line-rate="0" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="48" hits="0"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="41" hits="0"></line>
+ <line number="44" hits="0"></line>
+ <line number="48" hits="0"></line>
+ </lines>
+ </class>
+ <class name="-" filename="auth/token.go" line-rate="0.9183673" branch-rate="0" complexity="0" line-count="49" line-hits="45">
+ <methods>
+ <method name="init" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="38" hits="1"></line>
+ </lines>
+ </method>
+ <method name="CheckToken" signature="" line-rate="0.9285714" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="52" hits="1"></line>
+ <line number="53" hits="0"></line>
+ <line number="56" hits="1"></line>
+ <line number="57" hits="1"></line>
+ <line number="58" hits="1"></line>
+ <line number="61" hits="1"></line>
+ <line number="63" hits="1"></line>
+ <line number="64" hits="1"></line>
+ <line number="65" hits="1"></line>
+ <line number="68" hits="1"></line>
+ <line number="69" hits="1"></line>
+ <line number="72" hits="1"></line>
+ <line number="73" hits="1"></line>
+ <line number="77" hits="1"></line>
+ </lines>
+ </method>
+ <method name="tokensEqual" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="81" hits="1"></line>
+ </lines>
+ </method>
+ <method name="ExtractAuthInfo" signature="" line-rate="0.90909094" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="86" hits="1"></line>
+ <line number="88" hits="1"></line>
+ <line number="89" hits="1"></line>
+ <line number="92" hits="1"></line>
+ <line number="96" hits="1"></line>
+ <line number="97" hits="1"></line>
+ <line number="100" hits="1"></line>
+ <line number="101" hits="1"></line>
+ <line number="102" hits="1"></line>
+ <line number="103" hits="0"></line>
+ <line number="106" hits="1"></line>
+ </lines>
+ </method>
+ <method name="countV2Error" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="109" hits="1"></line>
+ </lines>
+ </method>
+ <method name="v2HmacInfoValid" signature="" line-rate="0.8888889" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="112" hits="1"></line>
+ <line number="113" hits="1"></line>
+ <line number="114" hits="1"></line>
+ <line number="115" hits="1"></line>
+ <line number="118" hits="1"></line>
+ <line number="119" hits="1"></line>
+ <line number="120" hits="0"></line>
+ <line number="121" hits="0"></line>
+ <line number="124" hits="1"></line>
+ <line number="125" hits="1"></line>
+ <line number="126" hits="1"></line>
+ <line number="128" hits="1"></line>
+ <line number="129" hits="1"></line>
+ <line number="130" hits="1"></line>
+ <line number="133" hits="1"></line>
+ <line number="134" hits="1"></line>
+ <line number="135" hits="1"></line>
+ <line number="138" hits="1"></line>
+ </lines>
+ </method>
+ <method name="hmacSign" signature="" line-rate="1" branch-rate="0" complexity="0" line-count="0" line-hits="0">
+ <lines>
+ <line number="142" hits="1"></line>
+ <line number="143" hits="1"></line>
+ <line number="145" hits="1"></line>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="38" hits="1"></line>
+ <line number="52" hits="1"></line>
+ <line number="53" hits="0"></line>
+ <line number="56" hits="1"></line>
+ <line number="57" hits="1"></line>
+ <line number="58" hits="1"></line>
+ <line number="61" hits="1"></line>
+ <line number="63" hits="1"></line>
+ <line number="64" hits="1"></line>
+ <line number="65" hits="1"></line>
+ <line number="68" hits="1"></line>
+ <line number="69" hits="1"></line>
+ <line number="72" hits="1"></line>
+ <line number="73" hits="1"></line>
+ <line number="77" hits="1"></line>
+ <line number="81" hits="1"></line>
+ <line number="86" hits="1"></line>
+ <line number="88" hits="1"></line>
+ <line number="89" hits="1"></line>
+ <line number="92" hits="1"></line>
+ <line number="96" hits="1"></line>
+ <line number="97" hits="1"></line>
+ <line number="100" hits="1"></line>
+ <line number="101" hits="1"></line>
+ <line number="102" hits="1"></line>
+ <line number="103" hits="0"></line>
+ <line number="106" hits="1"></line>
+ <line number="109" hits="1"></line>
+ <line number="112" hits="1"></line>
+ <line number="113" hits="1"></line>
+ <line number="114" hits="1"></line>
+ <line number="115" hits="1"></line>
+ <line number="118" hits="1"></line>
+ <line number="119" hits="1"></line>
+ <line number="120" hits="0"></line>
+ <line number="121" hits="0"></line>
+ <line number="124" hits="1"></line>
+ <line number="125" hits="1"></line>
+ <line number="126" hits="1"></line>
+ <line number="128" hits="1"></line>
+ <line number="129" hits="1"></line>
+ <line number="130" hits="1"></line>
+ <line number="133" hits="1"></line>
+ <line number="134" hits="1"></line>
+ <line number="135" hits="1"></line>
+ <line number="138" hits="1"></line>
+ <line number="142" hits="1"></line>
+ <line number="143" hits="1"></line>
+ <line number="145" hits="1"></line>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ </packages>
+ <sources>
+ <source>/tmp/projects/gitlab-ce/gitaly/src/gitlab.com/gitlab-org/gitaly</source>
+ </sources>
+</coverage>
diff --git a/spec/fixtures/cobertura/coverage_gocov_xml.xml.gz b/spec/fixtures/cobertura/coverage_gocov_xml.xml.gz
new file mode 100644
index 00000000000..e51dc50c2ed
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_gocov_xml.xml.gz
Binary files differ
diff --git a/spec/fixtures/cobertura/coverage_with_corrupted_data.xml b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml
new file mode 100644
index 00000000000..ab0973eba28
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" ?>
+<!DOCTYPE coverage SYSTEM "http://cobertura.sourceforge.net/xml/coverage-04.dtd">
+<!-- cobertura example file - generated by NYC - manually modified -->
+<coverage lines-valid="22" lines-covered="16" line-rate="0.7273000000000001" branches-valid="4" branches-covered="2" branch-rate="0.5" timestamp="1576756029756" complexity="0" version="0.1">
+ <sources>
+ <source>/tmp/projects/coverage-test</source>
+ </sources>
+ <packages>
+ <package name="coverage-test" line-rate="0.6842" branch-rate="0.5">
+ <classes>
+ <class name="index.js" filename="index.js" line-rate="0.6842" branch-rate="0.5">
+ <methods>
+ <method name="(anonymous_3)" hits="0" signature="()V">
+ <lines>
+ <line number="21" hits="0"/>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line number="21" hits="1" branch="false"/>
+ <line number="22" hits="0" branch="false"/>
+ <line number="25" hits="1" branch="true" condition-coverage="50% (1/2)"/>
+ <line number="26" hits="0" branch="false"/>
+ <line number="27" hits="0" branch="false"/>
+ <line number="28" hits="0" branch="false"/>
+ <line number="29" hits="0" branch="false"/>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ <package name="coverage-test.lib.math" line-rate="1" branch-rate="1">
+ <classes>
+ <class name="add.js" filename="lib/math/add.js" line-rate="1" branch-rate="1">
+ <methods>
+ <method name="(anonymous_0)" hits="1" signature="()V">
+ <lines>
+ <line number="1" hits="1"/>
+ </lines>
+ </method>
+ </methods>
+ <lines>
+ <line null="test" hits="1" branch="false"/>
+ <line number="2" hits="1" branch="false"/>
+ <line number="3" hits="1" branch="false"/>
+ </lines>
+ </class>
+ </classes>
+ </package>
+ </packages>
+</coverage>
diff --git a/spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz
new file mode 100644
index 00000000000..4d06c42ba0b
--- /dev/null
+++ b/spec/fixtures/cobertura/coverage_with_corrupted_data.xml.gz
Binary files differ
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index 25034dcf5ad..34d9ee733c4 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -13,6 +13,7 @@ localVue.use(Vuex);
describe('EksClusterConfigurationForm', () => {
let store;
let actions;
+ let getters;
let state;
let rolesState;
let regionsState;
@@ -29,8 +30,7 @@ describe('EksClusterConfigurationForm', () => {
let securityGroupsActions;
let vm;
- beforeEach(() => {
- state = eksClusterFormState();
+ const createStore = (config = {}) => {
actions = {
createCluster: jest.fn(),
setClusterName: jest.fn(),
@@ -64,29 +64,44 @@ describe('EksClusterConfigurationForm', () => {
securityGroupsActions = {
fetchItems: jest.fn(),
};
+ state = {
+ ...eksClusterFormState(),
+ ...config.initialState,
+ };
rolesState = {
...clusterDropdownStoreState(),
+ ...config.rolesState,
};
regionsState = {
...clusterDropdownStoreState(),
+ ...config.regionsState,
};
vpcsState = {
...clusterDropdownStoreState(),
+ ...config.vpcsState,
};
subnetsState = {
...clusterDropdownStoreState(),
+ ...config.subnetsState,
};
keyPairsState = {
...clusterDropdownStoreState(),
+ ...config.keyPairsState,
};
securityGroupsState = {
...clusterDropdownStoreState(),
+ ...config.securityGroupsState,
};
instanceTypesState = {
...clusterDropdownStoreState(),
+ ...config.instanceTypesState,
+ };
+ getters = {
+ subnetValid: config?.getters?.subnetValid || (() => false),
};
store = new Vuex.Store({
state,
+ getters,
actions,
modules: {
vpcs: {
@@ -125,9 +140,29 @@ describe('EksClusterConfigurationForm', () => {
},
},
});
- });
+ };
- beforeEach(() => {
+ const createValidStateStore = initialState => {
+ createStore({
+ initialState: {
+ clusterName: 'cluster name',
+ environmentScope: '*',
+ selectedRegion: 'region',
+ selectedRole: 'role',
+ selectedKeyPair: 'key pair',
+ selectedVpc: 'vpc',
+ selectedSubnet: ['subnet 1', 'subnet 2'],
+ selectedSecurityGroup: 'group',
+ selectedInstanceType: 'small-1',
+ ...initialState,
+ },
+ getters: {
+ subnetValid: () => true,
+ },
+ });
+ };
+
+ const buildWrapper = () => {
vm = shallowMount(EksClusterConfigurationForm, {
localVue,
store,
@@ -137,27 +172,17 @@ describe('EksClusterConfigurationForm', () => {
externalLinkIcon: '',
},
});
+ };
+
+ beforeEach(() => {
+ createStore();
+ buildWrapper();
});
afterEach(() => {
vm.destroy();
});
- const setAllConfigurationFields = () => {
- store.replaceState({
- ...state,
- clusterName: 'cluster name',
- environmentScope: '*',
- selectedRegion: 'region',
- selectedRole: 'role',
- selectedKeyPair: 'key pair',
- selectedVpc: 'vpc',
- selectedSubnet: 'subnet',
- selectedSecurityGroup: 'group',
- selectedInstanceType: 'small-1',
- });
- };
-
const findCreateClusterButton = () => vm.find('.js-create-cluster');
const findClusterNameInput = () => vm.find('[id=eks-cluster-name]');
const findEnvironmentScopeInput = () => vm.find('[id=eks-environment-scope]');
@@ -310,12 +335,29 @@ describe('EksClusterConfigurationForm', () => {
expect(findSubnetDropdown().props('items')).toBe(subnetsState.items);
});
- it('sets SubnetDropdown hasErrors to true when loading subnets fails', () => {
- subnetsState.loadingItemsError = new Error();
+ it('displays a validation error in the subnet dropdown when loading subnets fails', () => {
+ createStore({
+ subnetsState: {
+ loadingItemsError: new Error(),
+ },
+ });
+ buildWrapper();
- return Vue.nextTick().then(() => {
- expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ });
+
+ it('displays a validation error in the subnet dropdown when a single subnet is selected', () => {
+ createStore({
+ initialState: {
+ selectedSubnet: ['subnet 1'],
+ },
});
+ buildWrapper();
+
+ expect(findSubnetDropdown().props('hasErrors')).toEqual(true);
+ expect(findSubnetDropdown().props('errorMessage')).toEqual(
+ 'You should select at least two subnets',
+ );
});
it('disables SecurityGroupDropdown when no vpc is selected', () => {
@@ -386,11 +428,7 @@ describe('EksClusterConfigurationForm', () => {
});
it('cleans selected subnet', () => {
- expect(actions.setSubnet).toHaveBeenCalledWith(
- expect.anything(),
- { subnet: null },
- undefined,
- );
+ expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet: [] }, undefined);
});
it('cleans selected security group', () => {
@@ -464,11 +502,7 @@ describe('EksClusterConfigurationForm', () => {
});
it('cleans selected subnet', () => {
- expect(actions.setSubnet).toHaveBeenCalledWith(
- expect.anything(),
- { subnet: null },
- undefined,
- );
+ expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet: [] }, undefined);
});
it('cleans selected security group', () => {
@@ -573,22 +607,19 @@ describe('EksClusterConfigurationForm', () => {
});
describe('when all cluster configuration fields are set', () => {
- beforeEach(() => {
- setAllConfigurationFields();
- });
-
it('enables create cluster button', () => {
+ createValidStateStore();
+ buildWrapper();
expect(findCreateClusterButton().props('disabled')).toBe(false);
});
});
describe('when at least one cluster configuration field is not set', () => {
beforeEach(() => {
- setAllConfigurationFields();
- store.replaceState({
- ...state,
- clusterName: '',
+ createValidStateStore({
+ clusterName: null,
});
+ buildWrapper();
});
it('disables create cluster button', () => {
@@ -596,13 +627,12 @@ describe('EksClusterConfigurationForm', () => {
});
});
- describe('when isCreatingCluster', () => {
+ describe('when is creating cluster', () => {
beforeEach(() => {
- setAllConfigurationFields();
- store.replaceState({
- ...state,
+ createValidStateStore({
isCreatingCluster: true,
});
+ buildWrapper();
});
it('sets create cluster button as loading', () => {
diff --git a/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js b/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
new file mode 100644
index 00000000000..7c26aeb9b93
--- /dev/null
+++ b/spec/frontend/create_cluster/eks_cluster/store/getters_spec.js
@@ -0,0 +1,13 @@
+import { subnetValid } from '~/create_cluster/eks_cluster/store/getters';
+
+describe('EKS Cluster Store Getters', () => {
+ describe('subnetValid', () => {
+ it('returns true if there are 2 or more selected subnets', () => {
+ expect(subnetValid({ selectedSubnet: [1, 2] })).toBe(true);
+ });
+
+ it.each([[[], [1]]])('returns false if there are 1 or less selected subnets', subnets => {
+ expect(subnetValid({ selectedSubnet: subnets })).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 15f91871437..78e3ff4a60c 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -41,6 +41,7 @@ describe('diffs/components/app', () => {
endpoint: TEST_ENDPOINT,
endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
+ endpointCoverage: `${TEST_HOST}/diff/endpointCoverage`,
projectPath: 'namespace/project',
currentUser: {},
changesEmptyStateIllustration: '',
@@ -95,6 +96,7 @@ describe('diffs/components/app', () => {
jest.spyOn(wrapper.vm, 'fetchDiffFiles').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchDiffFilesBatch').mockImplementation(fetchResolver);
+ jest.spyOn(wrapper.vm, 'fetchCoverageFiles').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'setDiscussions').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'startRenderDiffsQueue').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'unwatchDiscussions').mockImplementation(() => {});
@@ -250,6 +252,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
@@ -269,6 +272,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
@@ -286,6 +290,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
+ expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
expect(wrapper.vm.unwatchRetrievingBatches).toHaveBeenCalled();
diff --git a/spec/javascripts/diffs/components/inline_diff_table_row_spec.js b/spec/javascripts/diffs/components/inline_diff_table_row_spec.js
index 67443e9aecc..392893eb695 100644
--- a/spec/javascripts/diffs/components/inline_diff_table_row_spec.js
+++ b/spec/javascripts/diffs/components/inline_diff_table_row_spec.js
@@ -12,6 +12,7 @@ describe('InlineDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(InlineDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@@ -39,4 +40,64 @@ describe('InlineDiffTableRow', () => {
.then(done)
.catch(done.fail);
});
+
+ describe('sets coverage title and class', () => {
+ it('for lines with coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = thisLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage');
+
+ expect(coverage.title).toContain('Test coverage: 5 hits');
+ expect(coverage.classList).toContain('coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for lines without coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = thisLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage');
+
+ expect(coverage.title).toContain('No test coverage');
+ expect(coverage.classList).toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for unknown lines', done => {
+ vm.$nextTick()
+ .then(() => {
+ vm.$store.state.diffs.coverageFiles = {};
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage');
+
+ expect(coverage.title).not.toContain('Coverage');
+ expect(coverage.classList).not.toContain('coverage');
+ expect(coverage.classList).not.toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
});
diff --git a/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js b/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js
index 32c947bbd8e..4e69382ba03 100644
--- a/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js
+++ b/spec/javascripts/diffs/components/parallel_diff_table_row_spec.js
@@ -14,6 +14,7 @@ describe('ParallelDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@@ -52,6 +53,7 @@ describe('ParallelDiffTableRow', () => {
vm = createComponentWithStore(Vue.extend(ParallelDiffTableRow), createStore(), {
line: thisLine,
fileHash: diffFileMockData.file_hash,
+ filePath: diffFileMockData.file_path,
contextLinesPath: 'contextLinesPath',
isHighlighted: false,
}).$mount();
@@ -81,5 +83,65 @@ describe('ParallelDiffTableRow', () => {
.then(done)
.catch(done.fail);
});
+
+ describe('sets coverage title and class', () => {
+ it('for lines with coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = rightLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 5 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage.right-side');
+
+ expect(coverage.title).toContain('Test coverage: 5 hits');
+ expect(coverage.classList).toContain('coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for lines without coverage', done => {
+ vm.$nextTick()
+ .then(() => {
+ const name = diffFileMockData.file_path;
+ const line = rightLine.new_line;
+
+ vm.$store.state.diffs.coverageFiles = { files: { [name]: { [line]: 0 } } };
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage.right-side');
+
+ expect(coverage.title).toContain('No test coverage');
+ expect(coverage.classList).toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('for unknown lines', done => {
+ vm.$nextTick()
+ .then(() => {
+ vm.$store.state.diffs.coverageFiles = {};
+
+ return vm.$nextTick();
+ })
+ .then(() => {
+ const coverage = vm.$el.querySelector('.line-coverage.right-side');
+
+ expect(coverage.title).not.toContain('Coverage');
+ expect(coverage.classList).not.toContain('coverage');
+ expect(coverage.classList).not.toContain('no-coverage');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
});
});
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index ff17d8ec158..7363a213847 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -12,6 +12,7 @@ import actions, {
fetchDiffFiles,
fetchDiffFilesBatch,
fetchDiffFilesMeta,
+ fetchCoverageFiles,
assignDiscussionsToDiff,
removeDiscussionsFromDiff,
startRenderDiffsQueue,
@@ -73,6 +74,7 @@ describe('DiffsStoreActions', () => {
const endpoint = '/diffs/set/endpoint';
const endpointMetadata = '/diffs/set/endpoint/metadata';
const endpointBatch = '/diffs/set/endpoint/batch';
+ const endpointCoverage = '/diffs/set/coverage_reports';
const projectPath = '/root/project';
const dismissEndpoint = '/-/user_callouts';
const showSuggestPopover = false;
@@ -84,6 +86,7 @@ describe('DiffsStoreActions', () => {
endpoint,
endpointBatch,
endpointMetadata,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -93,6 +96,7 @@ describe('DiffsStoreActions', () => {
endpoint: '',
endpointBatch: '',
endpointMetadata: '',
+ endpointCoverage: '',
projectPath: '',
dismissEndpoint: '',
showSuggestPopover: true,
@@ -105,6 +109,7 @@ describe('DiffsStoreActions', () => {
endpoint,
endpointMetadata,
endpointBatch,
+ endpointCoverage,
projectPath,
dismissEndpoint,
showSuggestPopover,
@@ -318,6 +323,44 @@ describe('DiffsStoreActions', () => {
});
});
+ describe('fetchCoverageFiles', () => {
+ let mock;
+ const endpointCoverage = '/fetch';
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => mock.restore());
+
+ it('should commit SET_COVERAGE_DATA with received response', done => {
+ const data = { files: { 'app.js': { '1': 0, '2': 1 } } };
+
+ mock.onGet(endpointCoverage).reply(200, { data });
+
+ testAction(
+ fetchCoverageFiles,
+ {},
+ { endpointCoverage },
+ [{ type: types.SET_COVERAGE_DATA, payload: { data } }],
+ [],
+ done,
+ );
+ });
+
+ it('should show flash on API error', done => {
+ const flashSpy = spyOnDependency(actions, 'createFlash');
+
+ mock.onGet(endpointCoverage).reply(400);
+
+ testAction(fetchCoverageFiles, {}, { endpointCoverage }, [], [], () => {
+ expect(flashSpy).toHaveBeenCalledTimes(1);
+ expect(flashSpy).toHaveBeenCalledWith(jasmine.stringMatching('Something went wrong'));
+ done();
+ });
+ });
+ });
+
describe('setHighlightedRow', () => {
it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
testAction(setHighlightedRow, 'ABC_123', {}, [
diff --git a/spec/javascripts/diffs/store/getters_spec.js b/spec/javascripts/diffs/store/getters_spec.js
index 9e628fdd540..ca47f51cb15 100644
--- a/spec/javascripts/diffs/store/getters_spec.js
+++ b/spec/javascripts/diffs/store/getters_spec.js
@@ -282,4 +282,34 @@ describe('Diffs Module Getters', () => {
expect(getters.currentDiffIndex(localState)).toEqual(0);
});
});
+
+ describe('fileLineCoverage', () => {
+ beforeEach(() => {
+ Object.assign(localState.coverageFiles, { files: { 'app.js': { '1': 0, '2': 5 } } });
+ });
+
+ it('returns empty object when no coverage data is available', () => {
+ Object.assign(localState.coverageFiles, {});
+
+ expect(getters.fileLineCoverage(localState)('test.js', 2)).toEqual({});
+ });
+
+ it('returns empty object when unknown filename is passed', () => {
+ expect(getters.fileLineCoverage(localState)('test.js', 2)).toEqual({});
+ });
+
+ it('returns no-coverage info when correct filename and line is passed', () => {
+ expect(getters.fileLineCoverage(localState)('app.js', 1)).toEqual({
+ text: 'No test coverage',
+ class: 'no-coverage',
+ });
+ });
+
+ it('returns coverage info when correct filename and line is passed', () => {
+ expect(getters.fileLineCoverage(localState)('app.js', 2)).toEqual({
+ text: 'Test coverage: 5 hits',
+ class: 'coverage',
+ });
+ });
+ });
});
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index ffe5d89e615..c36aff39aa9 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -123,6 +123,17 @@ describe('DiffsStoreMutations', () => {
});
});
+ describe('SET_COVERAGE_DATA', () => {
+ it('should set coverage data properly', () => {
+ const state = { coverageFiles: {} };
+ const coverage = { 'app.js': { '1': 0, '2': 1 } };
+
+ mutations[types.SET_COVERAGE_DATA](state, coverage);
+
+ expect(state.coverageFiles).toEqual(coverage);
+ });
+ });
+
describe('SET_DIFF_VIEW_TYPE', () => {
it('should set diff view type properly', () => {
const state = {};
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 31e1aaa42bf..2c8f76c8f34 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -45,6 +45,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
:performance | 'performance.json'
:lsif | 'lsif.json'
:dotenv | 'build.dotenv'
+ :cobertura | 'cobertura-coverage.xml'
end
with_them do
diff --git a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
new file mode 100644
index 00000000000..e97544683db
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+describe Gitlab::Ci::Parsers::Coverage::Cobertura do
+ describe '#parse!' do
+ subject { described_class.new.parse!(cobertura, coverage_report) }
+
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+
+ context 'when data is Cobertura style XML' do
+ context 'when there is no <class>' do
+ let(:cobertura) { '' }
+
+ it 'parses XML and returns empty coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'when there is a single <class>' do
+ context 'with no lines' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes><class filename="app.rb"></class></classes>
+ EOF
+ end
+
+ it 'parses XML and returns empty coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'with a single line' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><lines>
+ <line number="1" hits="2"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2 } })
+ end
+ end
+
+ context 'with multiple lines and methods info' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+ end
+
+ context 'when there are multiple <class>' do
+ context 'with the same filename and different lines' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with merged coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0, 6 => 1, 7 => 1 } })
+ end
+ end
+
+ context 'with the same filename and lines' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <packages><package><classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="1"/>
+ <line number="2" hits="1"/>
+ </lines></class>
+ </classes></package></packages>
+ EOF
+ end
+
+ it 'parses XML and returns a single file with summed-up coverage' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 3, 2 => 1 } })
+ end
+ end
+
+ context 'with missing filename' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class><methods/><lines>
+ <line number="6" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'parses XML and ignores class with missing name' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({ 'app.rb' => { 1 => 2, 2 => 0 } })
+ end
+ end
+
+ context 'with invalid line information' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <classes>
+ <class filename="app.rb"><methods/><lines>
+ <line number="1" hits="2"/>
+ <line number="2" hits="0"/>
+ </lines></class>
+ <class filename="app.rb"><methods/><lines>
+ <line null="test" hits="1"/>
+ <line number="7" hits="1"/>
+ </lines></class>
+ </classes>
+ EOF
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::CoberturaParserError)
+ end
+ end
+ end
+ end
+
+ context 'when data is not Cobertura style XML' do
+ let(:cobertura) { { coverage: '12%' }.to_json }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::CoberturaParserError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers_spec.rb b/spec/lib/gitlab/ci/parsers_spec.rb
index 4b647bffe59..9d6896b3cb4 100644
--- a/spec/lib/gitlab/ci/parsers_spec.rb
+++ b/spec/lib/gitlab/ci/parsers_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Parsers do
describe '.fabricate!' do
subject { described_class.fabricate!(file_type) }
- context 'when file_type exists' do
+ context 'when file_type is junit' do
let(:file_type) { 'junit' }
it 'fabricates the class' do
@@ -14,6 +14,14 @@ describe Gitlab::Ci::Parsers do
end
end
+ context 'when file_type is cobertura' do
+ let(:file_type) { 'cobertura' }
+
+ it 'fabricates the class' do
+ is_expected.to be_a(described_class::Coverage::Cobertura)
+ end
+ end
+
context 'when file_type does not exist' do
let(:file_type) { 'undefined' }
diff --git a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb b/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
new file mode 100644
index 00000000000..7cf43ceab32
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Reports::CoverageReports do
+ let(:coverage_report) { described_class.new }
+
+ it { expect(coverage_report.files).to eq({}) }
+
+ describe '#pick' do
+ before do
+ coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('routes.rb', { 3 => 1, 4 => 0 })
+ end
+
+ it 'returns only picked files while ignoring nonexistent ones' do
+ expect(coverage_report.pick(['routes.rb', 'nonexistent.txt'])).to eq({
+ files: { 'routes.rb' => { 3 => 1, 4 => 0 } }
+ })
+ end
+ end
+
+ describe '#add_file' do
+ context 'when providing two individual files' do
+ before do
+ coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('routes.rb', { 3 => 1, 4 => 0 })
+ end
+
+ it 'initializes a new test suite and returns it' do
+ expect(coverage_report.files).to eq({
+ 'app.rb' => { 1 => 0, 2 => 1 },
+ 'routes.rb' => { 3 => 1, 4 => 0 }
+ })
+ end
+ end
+
+ context 'when providing the same files twice' do
+ context 'with different line coverage' do
+ before do
+ coverage_report.add_file('admin.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('admin.rb', { 3 => 1, 4 => 0 })
+ end
+
+ it 'initializes a new test suite and returns it' do
+ expect(coverage_report.files).to eq({
+ 'admin.rb' => { 1 => 0, 2 => 1, 3 => 1, 4 => 0 }
+ })
+ end
+ end
+
+ context 'with identical line coverage' do
+ before do
+ coverage_report.add_file('projects.rb', { 1 => 0, 2 => 1 })
+ coverage_report.add_file('projects.rb', { 1 => 0, 2 => 1 })
+ end
+
+ it 'initializes a new test suite and returns it' do
+ expect(coverage_report.files).to eq({
+ 'projects.rb' => { 1 => 0, 2 => 2 }
+ })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
index a7440ac24ca..44fd49f0ac3 100644
--- a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
@@ -197,6 +197,6 @@ describe Gitlab::ImportExport::Group::TreeSaver do
end
def group_json(filename)
- JSON.parse(IO.read(filename))
+ ::JSON.parse(IO.read(filename))
end
end
diff --git a/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb
new file mode 100644
index 00000000000..b4cdfee3b22
--- /dev/null
+++ b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::JSON::LegacyWriter do
+ let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" }
+
+ subject { described_class.new(path) }
+
+ after do
+ FileUtils.rm_rf(path)
+ end
+
+ describe "#write" do
+ context "when key is already written" do
+ it "raises exception" do
+ key = "key"
+ value = "value"
+ subject.write(key, value)
+
+ expect { subject.write(key, "new value") }.to raise_exception("key '#{key}' already written")
+ end
+ end
+
+ context "when key is not already written" do
+ context "when multiple key value pairs are stored" do
+ it "writes correct json" do
+ expected_hash = { "key" => "value_1", "key_1" => "value_2" }
+ expected_hash.each do |key, value|
+ subject.write(key, value)
+ end
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+ end
+ end
+
+ describe "#append" do
+ context "when key is already written" do
+ it "appends values under a given key" do
+ key = "key"
+ values = %w(value_1 value_2)
+ expected_hash = { key => values }
+ values.each do |value|
+ subject.append(key, value)
+ end
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+
+ context "when key is not already written" do
+ it "writes correct json" do
+ expected_hash = { "key" => ["value"] }
+ subject.append("key", "value")
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+ end
+
+ describe "#set" do
+ it "writes correct json" do
+ expected_hash = { "key" => "value_1", "key_1" => "value_2" }
+ subject.set(expected_hash)
+ subject.close
+
+ expect(saved_json(path)).to eq(expected_hash)
+ end
+ end
+
+ def saved_json(filename)
+ ::JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
index 2fc26c0e3d4..db77bd338e1 100644
--- a/spec/lib/gitlab/import_export/relation_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::ImportExport::RelationTreeSaver do
+describe Gitlab::ImportExport::LegacyRelationTreeSaver do
let(:exportable) { create(:group) }
let(:relation_tree_saver) { described_class.new }
let(:tree) { {} }
diff --git a/spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb
new file mode 100644
index 00000000000..d4406dbc60b
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/legacy_tree_saver_spec.rb
@@ -0,0 +1,397 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Project::LegacyTreeSaver do
+ describe 'saves the project tree into a json object' do
+ let(:shared) { project.import_export_shared }
+ let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
+ let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let(:user) { create(:user) }
+ let!(:project) { setup_project }
+
+ before do
+ project.add_maintainer(user)
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
+ allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ it 'saves project successfully' do
+ expect(project_tree_saver.save).to be true
+ end
+
+ context ':export_fast_serialize feature flag checks' do
+ before do
+ expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
+ expect(reader).to receive(:project_tree).and_return(project_tree)
+ end
+
+ let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
+ let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
+ let(:project_tree) do
+ {
+ include: [{ issues: { include: [] } }],
+ preload: { issues: nil }
+ }
+ end
+
+ context 'when :export_fast_serialize feature is enabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: true)
+ end
+
+ it 'uses FastHashSerializer' do
+ expect(Gitlab::ImportExport::FastHashSerializer)
+ .to receive(:new)
+ .with(project, project_tree)
+ .and_return(serializer)
+
+ expect(serializer).to receive(:execute)
+
+ project_tree_saver.save
+ end
+ end
+
+ context 'when :export_fast_serialize feature is disabled' do
+ before do
+ stub_feature_flags(export_fast_serialize: false)
+ end
+
+ it 'is serialized via built-in `as_json`' do
+ expect(project).to receive(:as_json).with(project_tree)
+
+ project_tree_saver.save
+ end
+ end
+ end
+
+ # It is mostly duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ # except:
+ # context 'with description override' do
+ # context 'group members' do
+ # ^ These are specific for the Project::TreeSaver
+ context 'JSON' do
+ let(:saved_project_json) do
+ project_tree_saver.save
+ project_json(project_tree_saver.full_path)
+ end
+
+ # It is not duplicated in
+ # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
+ context 'with description override' do
+ let(:params) { { description: 'Foo Bar' } }
+ let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
+
+ it 'overrides the project description' do
+ expect(saved_project_json).to include({ 'description' => params[:description] })
+ end
+ end
+
+ it 'saves the correct json' do
+ expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
+ end
+
+ it 'has approvals_before_merge set' do
+ expect(saved_project_json['approvals_before_merge']).to eq(1)
+ end
+
+ it 'has milestones' do
+ expect(saved_project_json['milestones']).not_to be_empty
+ end
+
+ it 'has merge requests' do
+ expect(saved_project_json['merge_requests']).not_to be_empty
+ end
+
+ it 'has merge request\'s milestones' do
+ expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
+ end
+
+ it 'has merge request\'s source branch SHA' do
+ expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
+ end
+
+ it 'has merge request\'s target branch SHA' do
+ expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
+ end
+
+ it 'has events' do
+ expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
+ end
+
+ it 'has snippets' do
+ expect(saved_project_json['snippets']).not_to be_empty
+ end
+
+ it 'has snippet notes' do
+ expect(saved_project_json['snippets'].first['notes']).not_to be_empty
+ end
+
+ it 'has releases' do
+ expect(saved_project_json['releases']).not_to be_empty
+ end
+
+ it 'has no author on releases' do
+ expect(saved_project_json['releases'].first['author']).to be_nil
+ end
+
+ it 'has the author ID on releases' do
+ expect(saved_project_json['releases'].first['author_id']).not_to be_nil
+ end
+
+ it 'has issues' do
+ expect(saved_project_json['issues']).not_to be_empty
+ end
+
+ it 'has issue comments' do
+ notes = saved_project_json['issues'].first['notes']
+
+ expect(notes).not_to be_empty
+ expect(notes.first['type']).to eq('DiscussionNote')
+ end
+
+ it 'has issue assignees' do
+ expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
+ end
+
+ it 'has author on issue comments' do
+ expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has project members' do
+ expect(saved_project_json['project_members']).not_to be_empty
+ end
+
+      it 'has merge request diffs' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
+ end
+
+ it 'has merge request diff files' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
+ end
+
+ it 'has merge request diff commits' do
+ expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
+ end
+
+      it 'has merge request comments' do
+ expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
+ end
+
+      it 'has author on merge request comments' do
+ expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
+ end
+
+ it 'has pipeline stages' do
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
+ end
+
+ it 'has pipeline statuses' do
+ expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
+ end
+
+ it 'has pipeline builds' do
+ builds_count = saved_project_json
+ .dig('ci_pipelines', 0, 'stages', 0, 'statuses')
+ .count { |hash| hash['type'] == 'Ci::Build' }
+
+ expect(builds_count).to eq(1)
+ end
+
+      it 'has no when YML attributes, only the DB column' do
+ expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
+
+ saved_project_json
+ end
+
+ it 'has pipeline commits' do
+ expect(saved_project_json['ci_pipelines']).not_to be_empty
+ end
+
+ it 'has ci pipeline notes' do
+ expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
+ end
+
+ it 'has labels with no associations' do
+ expect(saved_project_json['labels']).not_to be_empty
+ end
+
+ it 'has labels associated to records' do
+ expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
+ end
+
+ it 'has project and group labels' do
+ label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }
+
+ expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
+ end
+
+ it 'has priorities associated to labels' do
+ priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
+
+ expect(priorities).not_to be_empty
+ end
+
+ it 'has issue resource label events' do
+ expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'has merge request resource label events' do
+ expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
+ end
+
+ it 'saves the correct service type' do
+ expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
+ end
+
+ it 'saves the properties for a service' do
+ expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
+ end
+
+ it 'has project feature' do
+ project_feature = saved_project_json['project_feature']
+ expect(project_feature).not_to be_empty
+ expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
+ expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
+ expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
+ end
+
+ it 'has custom attributes' do
+ expect(saved_project_json['custom_attributes'].count).to eq(2)
+ end
+
+ it 'has badges' do
+ expect(saved_project_json['project_badges'].count).to eq(2)
+ end
+
+ it 'does not complain about non UTF-8 characters in MR diff files' do
+ ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
+
+ expect(project_tree_saver.save).to be true
+ end
+
+ context 'group members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ saved_project_json['project_members'].map do |pm|
+ pm['user']['email']
+ end
+ end
+
+ before do
+ Group.first.add_developer(user2)
+ end
+
+        it 'does not export group members if the user has no permission' do
+ Group.first.add_developer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'does not export group members as maintainer' do
+ Group.first.add_maintainer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'exports group members as group owner' do
+ Group.first.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members as project members' do
+ member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }
+
+ expect(member_types).to all(eq('Project'))
+ end
+ end
+ end
+
+ context 'project attributes' do
+ it 'does not contain the runners token' do
+ expect(saved_project_json).not_to include("runners_token" => 'token')
+ end
+ end
+
+ it 'has a board and a list' do
+ expect(saved_project_json['boards'].first['lists']).not_to be_empty
+ end
+ end
+ end
+
+ def setup_project
+ release = create(:release)
+ group = create(:group)
+
+ project = create(:project,
+ :public,
+ :repository,
+ :issues_disabled,
+ :wiki_enabled,
+ :builds_private,
+ description: 'description',
+ releases: [release],
+ group: group,
+ approvals_before_merge: 1
+ )
+ allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
+
+ issue = create(:issue, assignees: [user], project: project)
+ snippet = create(:project_snippet, project: project)
+ project_label = create(:label, project: project)
+ group_label = create(:group_label, group: group)
+ create(:label_link, label: project_label, target: issue)
+ create(:label_link, label: group_label, target: issue)
+ create(:label_priority, label: group_label, priority: 1)
+ milestone = create(:milestone, project: project)
+ merge_request = create(:merge_request, source_project: project, milestone: milestone)
+
+ ci_build = create(:ci_build, project: project, when: nil)
+ ci_build.pipeline.update(project: project)
+ create(:commit_status, project: project, pipeline: ci_build.pipeline)
+
+ create(:milestone, project: project)
+ create(:discussion_note, noteable: issue, project: project)
+ create(:note, noteable: merge_request, project: project)
+ create(:note, noteable: snippet, project: project)
+ create(:note_on_commit,
+ author: user,
+ project: project,
+ commit_id: ci_build.pipeline.sha)
+
+ create(:resource_label_event, label: project_label, issue: issue)
+ create(:resource_label_event, label: group_label, merge_request: merge_request)
+
+ create(:event, :created, target: milestone, project: project, author: user)
+ create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
+
+ create(:project_custom_attribute, project: project)
+ create(:project_custom_attribute, project: project)
+
+ create(:project_badge, project: project)
+ create(:project_badge, project: project)
+
+ board = create(:board, project: project, name: 'TestBoard')
+ create(:list, board: board, position: 0, label: project_label)
+
+ project
+ end
+
+ def project_json(filename)
+ ::JSON.parse(IO.read(filename))
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index 151fdf8810f..23360b725b9 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -25,57 +25,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
expect(project_tree_saver.save).to be true
end
- context ':export_fast_serialize feature flag checks' do
- before do
- expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
- expect(reader).to receive(:project_tree).and_return(project_tree)
- end
-
- let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
- let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
- let(:project_tree) do
- {
- include: [{ issues: { include: [] } }],
- preload: { issues: nil }
- }
- end
-
- context 'when :export_fast_serialize feature is enabled' do
- before do
- stub_feature_flags(export_fast_serialize: true)
- end
-
- it 'uses FastHashSerializer' do
- expect(Gitlab::ImportExport::FastHashSerializer)
- .to receive(:new)
- .with(project, project_tree)
- .and_return(serializer)
-
- expect(serializer).to receive(:execute)
-
- project_tree_saver.save
- end
- end
-
- context 'when :export_fast_serialize feature is disabled' do
- before do
- stub_feature_flags(export_fast_serialize: false)
- end
-
- it 'is serialized via built-in `as_json`' do
- expect(project).to receive(:as_json).with(project_tree)
-
- project_tree_saver.save
- end
- end
- end
-
- # It is mostly duplicated in
- # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
- # except:
- # context 'with description override' do
- # context 'group members' do
- # ^ These are specific for the Project::TreeSaver
context 'JSON' do
let(:saved_project_json) do
project_tree_saver.save
@@ -392,6 +341,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
end
def project_json(filename)
- JSON.parse(IO.read(filename))
+ ::JSON.parse(IO.read(filename))
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 6c77b16f908..a661aa6e3a9 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3946,6 +3946,53 @@ describe Ci::Build do
end
end
+ describe '#collect_coverage_reports!' do
+ subject { build.collect_coverage_reports!(coverage_report) }
+
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+
+ it { expect(coverage_report.files).to eq({}) }
+
+ context 'when build has a coverage report' do
+ context 'when there is a Cobertura coverage report from simplecov-cobertura' do
+ before do
+ create(:ci_job_artifact, :cobertura, job: build, project: build.project)
+ end
+
+        it 'parses blobs and adds the results to the coverage report' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files.keys).to match_array(['app/controllers/abuse_reports_controller.rb'])
+ expect(coverage_report.files['app/controllers/abuse_reports_controller.rb'].count).to eq(23)
+ end
+ end
+
+ context 'when there is a Cobertura coverage report from gocov-xml' do
+ before do
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build, project: build.project)
+ end
+
+        it 'parses blobs and adds the results to the coverage report' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files.keys).to match_array(['auth/token.go', 'auth/rpccredentials.go'])
+ expect(coverage_report.files['auth/token.go'].count).to eq(49)
+ expect(coverage_report.files['auth/rpccredentials.go'].count).to eq(10)
+ end
+ end
+
+ context 'when there is a corrupted Cobertura coverage report' do
+ before do
+ create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: build.project)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::CoberturaParserError)
+ end
+ end
+ end
+ end
+
describe '#report_artifacts' do
subject { build.report_artifacts }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 0a7a44b225c..de93c3c1675 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -70,6 +70,22 @@ describe Ci::JobArtifact do
end
end
+ describe '.coverage_reports' do
+ subject { described_class.coverage_reports }
+
+ context 'when there is a coverage report' do
+ let!(:artifact) { create(:ci_job_artifact, :cobertura) }
+
+ it { is_expected.to eq([artifact]) }
+ end
+
+ context 'when there are no coverage reports' do
+ let!(:artifact) { create(:ci_job_artifact, :archive) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.erasable' do
subject { described_class.erasable }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 51a2e2aff67..f18c77988c8 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -344,9 +344,9 @@ describe Ci::Pipeline, :mailer do
end
describe '.with_reports' do
- subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
-
context 'when pipeline has a test report' do
+ subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
+
let!(:pipeline_with_report) { create(:ci_pipeline, :with_test_reports) }
it 'selects the pipeline' do
@@ -354,7 +354,19 @@ describe Ci::Pipeline, :mailer do
end
end
+ context 'when pipeline has a coverage report' do
+ subject { described_class.with_reports(Ci::JobArtifact.coverage_reports) }
+
+ let!(:pipeline_with_report) { create(:ci_pipeline, :with_coverage_reports) }
+
+ it 'selects the pipeline' do
+ is_expected.to eq([pipeline_with_report])
+ end
+ end
+
context 'when pipeline does not have metrics reports' do
+ subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
+
let!(:pipeline_without_report) { create(:ci_empty_pipeline) }
it 'does not select the pipeline' do
@@ -2730,6 +2742,43 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#coverage_reports' do
+ subject { pipeline.coverage_reports }
+
+ context 'when pipeline has multiple builds with coverage reports' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline, project: project) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec, project: project)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang, project: project)
+ end
+
+ it 'returns coverage reports with collected data' do
+ expect(subject.files.keys).to match_array([
+ "auth/token.go",
+ "auth/rpccredentials.go",
+ "app/controllers/abuse_reports_controller.rb"
+ ])
+ end
+
+ context 'when builds are retried' do
+ let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
+ let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline, project: project) }
+
+ it 'does not take retried builds into account' do
+ expect(subject.files).to eql({})
+ end
+ end
+ end
+
+ context 'when pipeline does not have any builds with coverage reports' do
+ it 'returns empty coverage reports' do
+ expect(subject.files).to eql({})
+ end
+ end
+ end
+
describe '#total_size' do
let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build_job2) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 7cadce12213..137795dcbc3 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -908,6 +908,16 @@ describe MergeRequest do
end
end
+ describe '#new_paths' do
+ let(:merge_request) do
+ create(:merge_request, source_branch: 'expand-collapse-files', target_branch: 'master')
+ end
+
+    it 'returns new paths of changed files' do
+ expect(merge_request.new_paths.count).to eq(105)
+ end
+ end
+
describe "#related_notes" do
let!(:merge_request) { create(:merge_request) }
@@ -1581,6 +1591,24 @@ describe MergeRequest do
end
end
+ describe '#has_coverage_reports?' do
+ subject { merge_request.has_coverage_reports? }
+
+ let(:project) { create(:project, :repository) }
+
+ context 'when head pipeline has coverage reports' do
+ let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when head pipeline does not have coverage reports' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#calculate_reactive_cache' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -1663,6 +1691,60 @@ describe MergeRequest do
end
end
+ describe '#find_coverage_reports' do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
+ let(:pipeline) { merge_request.head_pipeline }
+
+ subject { merge_request.find_coverage_reports }
+
+ context 'when head pipeline has coverage reports' do
+ let!(:job) do
+ create(:ci_build, options: { artifacts: { reports: { cobertura: ['cobertura-coverage.xml'] } } }, pipeline: pipeline)
+ end
+
+ let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
+
+ context 'when reactive cache worker is parsing results asynchronously' do
+ it 'returns status' do
+ expect(subject[:status]).to eq(:parsing)
+ end
+ end
+
+ context 'when reactive cache worker is inline' do
+ before do
+ synchronous_reactive_cache(merge_request)
+ end
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ end
+
+        context 'when an error occurs' do
+ before do
+ merge_request.update!(head_pipeline: nil)
+ end
+
+ it 'returns an error message' do
+ expect(subject[:status]).to eq(:error)
+ end
+ end
+
+        context 'when cached results are not latest' do
+ before do
+ allow_next_instance_of(Ci::GenerateCoverageReportsService) do |service|
+ allow(service).to receive(:latest?).and_return(false)
+ end
+ end
+
+          it 'raises an InvalidateReactiveCache error' do
+ expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
+ end
+ end
+ end
+ end
+ end
+
describe '#compare_test_reports' do
subject { merge_request.compare_test_reports }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 6303fe8a5bb..849494e7cd4 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4335,4 +4335,27 @@ describe User, :do_not_mock_admin_mode do
it { expect(user.user_detail).to be_persisted }
end
end
+
+ describe '#gitlab_employee?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { user.gitlab_employee? }
+
+ where(:email, :is_com, :expected_result) do
+ 'test@gitlab.com' | true | true
+ 'test@example.com' | true | false
+ 'test@gitlab.com' | false | false
+ 'test@example.com' | false | false
+ end
+
+ with_them do
+ let(:user) { build(:user, email: email) }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_com)
+ end
+
+ it { is_expected.to be expected_result }
+ end
+ end
end
diff --git a/spec/presenters/projects/import_export/project_export_presenter_spec.rb b/spec/presenters/projects/import_export/project_export_presenter_spec.rb
new file mode 100644
index 00000000000..052ca36974a
--- /dev/null
+++ b/spec/presenters/projects/import_export/project_export_presenter_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::ImportExport::ProjectExportPresenter do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(project, current_user: user) }
+
+ describe '#description' do
+ context "override_description not provided" do
+ it "keeps original description" do
+ expect(subject.description).to eq(project.description)
+ end
+ end
+
+ context "override_description provided" do
+ let(:description) { "overridden description" }
+
+ subject { described_class.new(project, current_user: user, override_description: description) }
+
+ it "overrides description" do
+ expect(subject.description).to eq(description)
+ end
+ end
+ end
+
+ describe '#as_json' do
+ context "override_description not provided" do
+ it "keeps original description" do
+ expect(subject.as_json["description"]).to eq(project.description)
+ end
+ end
+
+ context "override_description provided" do
+ let(:description) { "overridden description" }
+
+ subject { described_class.new(project, current_user: user, override_description: description) }
+
+ it "overrides description" do
+ expect(subject.as_json["description"]).to eq(description)
+ end
+ end
+ end
+
+ describe '#project_members' do
+ let(:user2) { create(:user, email: 'group@member.com') }
+ let(:member_emails) do
+ subject.project_members.map do |pm|
+ pm.user.email
+ end
+ end
+
+ before do
+ group.add_developer(user2)
+ end
+
+    it 'does not export group members if the user has no permission' do
+ group.add_developer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'does not export group members as maintainer' do
+ group.add_maintainer(user)
+
+ expect(member_emails).not_to include('group@member.com')
+ end
+
+ it 'exports group members as group owner' do
+ group.add_owner(user)
+
+ expect(member_emails).to include('group@member.com')
+ end
+
+ context 'as admin' do
+ let(:user) { create(:admin) }
+
+ it 'exports group members as admin' do
+ expect(member_emails).to include('group@member.com')
+ end
+
+ it 'exports group members as project members' do
+ member_types = subject.project_members.map { |pm| pm.source_type }
+
+ expect(member_types).to all(eq('Project'))
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 6cecab8656a..0ed4dcec93e 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -36,7 +36,8 @@ describe Ci::RetryBuildService do
job_artifacts_performance job_artifacts_lsif
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
- job_artifacts_network_referee job_artifacts_dotenv needs].freeze
+ job_artifacts_network_referee job_artifacts_dotenv
+ job_artifacts_cobertura needs].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index e00507d1827..1315ae26322 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -26,10 +26,28 @@ describe Projects::ImportExport::ExportService do
service.execute
end
- it 'saves the models' do
- expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
+ context 'when :streaming_serializer feature is enabled' do
+ before do
+ stub_feature_flags(streaming_serializer: true)
+ end
- service.execute
+ it 'saves the models' do
+ expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
+ end
+
+ context 'when :streaming_serializer feature is disabled' do
+ before do
+ stub_feature_flags(streaming_serializer: false)
+ end
+
+ it 'saves the models' do
+ expect(Gitlab::ImportExport::Project::LegacyTreeSaver).to receive(:new).and_call_original
+
+ service.execute
+ end
end
it 'saves the uploads' do
diff --git a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
index 939ea405724..3ad2263688b 100644
--- a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
@@ -55,6 +55,58 @@ RSpec.shared_examples 'with cross-reference system notes' do
end
RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_reply_to_individual_notes: false|
+ shared_examples 'is_gitlab_employee attribute presence' do
+ subject { get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user) }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ user.update(email: email)
+ user.confirm
+ end
+
+ context 'when author is a gitlab employee' do
+ let(:email) { 'test@gitlab.com' }
+
+ it 'returns is_gitlab_employee as true' do
+ subject
+
+ expect(json_response.first["notes"].first["author"]['is_gitlab_employee']).to be true
+ end
+ end
+
+ shared_examples 'non inclusion of gitlab employee badge' do
+ it 'does not include is_gitlab_employee attribute' do
+ subject
+
+ expect(json_response.first["notes"].first["author"]).not_to have_key('is_gitlab_employee')
+ end
+ end
+
+ context 'when author is not a gitlab employee' do
+ let(:email) { 'test@example.com' }
+
+ it_behaves_like 'non inclusion of gitlab employee badge'
+ end
+
+ describe 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(gitlab_employee_badge: false)
+ end
+
+ context 'when author is a gitlab employee' do
+ let(:email) { 'test@gitlab.com' }
+
+ it_behaves_like 'non inclusion of gitlab employee badge'
+ end
+
+ context 'when author is not a gitlab employee' do
+ let(:email) { 'test@example.com' }
+
+ it_behaves_like 'non inclusion of gitlab employee badge'
+ end
+ end
+ end
+
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "returns an array of discussions" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
@@ -78,6 +130,8 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it_behaves_like 'is_gitlab_employee attribute presence'
end
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id" do
@@ -196,6 +250,8 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
end
end
end
+
+ it_behaves_like 'is_gitlab_employee attribute presence'
end
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id/notes" do