author    Grzegorz Bizon <grzesiek.bizon@gmail.com>  2017-11-30 12:52:04 +0100
committer Grzegorz Bizon <grzesiek.bizon@gmail.com>  2017-11-30 12:52:04 +0100
commit    5df4ba0a93fd266105ccf35fd49fa18e6403c15b (patch)
tree      57d32dbd0af5d54643060faba9ab6ba993357a1f /spec/lib
parent    1486950bc9396f9b8384763d68f36387326eb745 (diff)
parent    feece7713247a063bfa71ab701f8a164e6fa71bb (diff)
Merge branch 'master' into backstage/gb/build-pipeline-in-a-separate-class
* master: (1794 commits)
Diffstat (limited to 'spec/lib')
-rw-r--r-- spec/lib/additional_email_headers_interceptor_spec.rb | 21
-rw-r--r-- spec/lib/api/helpers_spec.rb | 109
-rw-r--r-- spec/lib/banzai/commit_renderer_spec.rb | 2
-rw-r--r-- spec/lib/banzai/filter/absolute_link_filter_spec.rb | 58
-rw-r--r-- spec/lib/banzai/filter/gollum_tags_filter_spec.rb | 10
-rw-r--r-- spec/lib/banzai/filter/issue_reference_filter_spec.rb | 70
-rw-r--r-- spec/lib/banzai/filter/label_reference_filter_spec.rb | 12
-rw-r--r-- spec/lib/banzai/filter/merge_request_reference_filter_spec.rb | 10
-rw-r--r-- spec/lib/banzai/filter/mermaid_filter_spec.rb | 12
-rw-r--r-- spec/lib/banzai/filter/milestone_reference_filter_spec.rb | 45
-rw-r--r-- spec/lib/banzai/filter/sanitization_filter_spec.rb | 5
-rw-r--r-- spec/lib/banzai/filter/snippet_reference_filter_spec.rb | 10
-rw-r--r-- spec/lib/banzai/filter/syntax_highlight_filter_spec.rb | 2
-rw-r--r-- spec/lib/banzai/filter/user_reference_filter_spec.rb | 33
-rw-r--r-- spec/lib/banzai/note_renderer_spec.rb | 24
-rw-r--r-- spec/lib/banzai/object_renderer_spec.rb | 4
-rw-r--r-- spec/lib/banzai/renderer_spec.rb | 2
-rw-r--r-- spec/lib/container_registry/path_spec.rb | 18
-rw-r--r-- spec/lib/feature_spec.rb | 41
-rw-r--r-- spec/lib/github/client_spec.rb | 34
-rw-r--r-- spec/lib/github/import/legacy_diff_note_spec.rb | 9
-rw-r--r-- spec/lib/github/import/note_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/app_logger_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/auth/request_authenticator_spec.rb | 67
-rw-r--r-- spec/lib/gitlab/auth/user_auth_finders_spec.rb | 194
-rw-r--r-- spec/lib/gitlab/auth_spec.rb | 66
-rw-r--r-- spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb | 37
-rw-r--r-- spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb | 62
-rw-r--r-- spec/lib/gitlab/backup/manager_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/bare_repository_import/importer_spec.rb | 168
-rw-r--r-- spec/lib/gitlab/bare_repository_import/repository_spec.rb | 51
-rw-r--r-- spec/lib/gitlab/bare_repository_importer_spec.rb | 100
-rw-r--r-- spec/lib/gitlab/bitbucket_import/importer_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/checks/change_access_spec.rb | 46
-rw-r--r-- spec/lib/gitlab/checks/force_push_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/checks/lfs_integrity_spec.rb | 74
-rw-r--r-- spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb | 21
-rw-r--r-- spec/lib/gitlab/ci/cron_parser_spec.rb | 48
-rw-r--r-- spec/lib/gitlab/ci/status/build/cancelable_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/build/factory_spec.rb | 18
-rw-r--r-- spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/build/play_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/build/retryable_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/build/stop_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/canceled_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/created_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/failed_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/manual_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/pending_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/running_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/skipped_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/success_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/success_warning_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/yaml_processor_spec.rb | 28
-rw-r--r-- spec/lib/gitlab/conflict/file_collection_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/conflict/file_spec.rb | 25
-rw-r--r-- spec/lib/gitlab/current_settings_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/grant_spec.rb | 22
-rw-r--r-- spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb | 18
-rw-r--r-- spec/lib/gitlab/database_spec.rb | 42
-rw-r--r-- spec/lib/gitlab/diff/file_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/diff/parser_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/diff/position_spec.rb | 57
-rw-r--r-- spec/lib/gitlab/diff/position_tracer_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/encoding_helper_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/fake_application_settings_spec.rb | 10
-rw-r--r-- spec/lib/gitlab/file_detector_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/git/blame_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/git/blob_spec.rb | 59
-rw-r--r-- spec/lib/gitlab/git/branch_spec.rb | 32
-rw-r--r-- spec/lib/gitlab/git/commit_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/git/conflict/parser_spec.rb (renamed from spec/lib/gitlab/conflict/parser_spec.rb) | 68
-rw-r--r-- spec/lib/gitlab/git/diff_collection_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/git/env_spec.rb | 42
-rw-r--r-- spec/lib/gitlab/git/hooks_service_spec.rb | 28
-rw-r--r-- spec/lib/gitlab/git/lfs_changes_spec.rb | 48
-rw-r--r-- spec/lib/gitlab/git/popen_spec.rb | 149
-rw-r--r-- spec/lib/gitlab/git/remote_repository_spec.rb | 99
-rw-r--r-- spec/lib/gitlab/git/repository_spec.rb | 462
-rw-r--r-- spec/lib/gitlab/git/rev_list_spec.rb | 123
-rw-r--r-- spec/lib/gitlab/git/storage/circuit_breaker_spec.rb | 315
-rw-r--r-- spec/lib/gitlab/git/storage/forked_storage_check_spec.rb | 15
-rw-r--r-- spec/lib/gitlab/git/storage/health_spec.rb | 30
-rw-r--r-- spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/git/tag_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/git/user_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/git_access_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/gitaly_client/operation_service_spec.rb | 36
-rw-r--r-- spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/gitaly_client/repository_service_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/gitaly_client/util_spec.rb | 22
-rw-r--r-- spec/lib/gitlab/gitaly_client/wiki_service_spec.rb | 88
-rw-r--r-- spec/lib/gitlab/gitaly_client_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/github_import/bulk_importing_spec.rb | 62
-rw-r--r-- spec/lib/gitlab/github_import/caching_spec.rb | 117
-rw-r--r-- spec/lib/gitlab/github_import/client_spec.rb | 389
-rw-r--r-- spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb | 152
-rw-r--r-- spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb | 119
-rw-r--r-- spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb | 27
-rw-r--r-- spec/lib/gitlab/github_import/importer/issue_importer_spec.rb | 201
-rw-r--r-- spec/lib/gitlab/github_import/importer/issues_importer_spec.rb | 111
-rw-r--r-- spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb | 82
-rw-r--r-- spec/lib/gitlab/github_import/importer/labels_importer_spec.rb | 107
-rw-r--r-- spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb | 120
-rw-r--r-- spec/lib/gitlab/github_import/importer/note_importer_spec.rb | 151
-rw-r--r-- spec/lib/gitlab/github_import/importer/notes_importer_spec.rb | 116
-rw-r--r-- spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb | 221
-rw-r--r-- spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb | 272
-rw-r--r-- spec/lib/gitlab/github_import/importer/releases_importer_spec.rb | 125
-rw-r--r-- spec/lib/gitlab/github_import/importer/repository_importer_spec.rb | 227
-rw-r--r-- spec/lib/gitlab/github_import/issuable_finder_spec.rb | 38
-rw-r--r-- spec/lib/gitlab/github_import/label_finder_spec.rb | 61
-rw-r--r-- spec/lib/gitlab/github_import/markdown_text_spec.rb | 28
-rw-r--r-- spec/lib/gitlab/github_import/milestone_finder_spec.rb | 57
-rw-r--r-- spec/lib/gitlab/github_import/page_counter_spec.rb | 32
-rw-r--r-- spec/lib/gitlab/github_import/parallel_importer_spec.rb | 40
-rw-r--r-- spec/lib/gitlab/github_import/parallel_scheduling_spec.rb | 296
-rw-r--r-- spec/lib/gitlab/github_import/representation/diff_note_spec.rb | 164
-rw-r--r-- spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb | 19
-rw-r--r-- spec/lib/gitlab/github_import/representation/issue_spec.rb | 182
-rw-r--r-- spec/lib/gitlab/github_import/representation/note_spec.rb | 107
-rw-r--r-- spec/lib/gitlab/github_import/representation/pull_request_spec.rb | 288
-rw-r--r-- spec/lib/gitlab/github_import/representation/to_hash_spec.rb | 37
-rw-r--r-- spec/lib/gitlab/github_import/representation/user_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/github_import/representation_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/github_import/sequential_importer_spec.rb | 37
-rw-r--r-- spec/lib/gitlab/github_import/user_finder_spec.rb | 333
-rw-r--r-- spec/lib/gitlab/github_import_spec.rb | 79
-rw-r--r-- spec/lib/gitlab/group_hierarchy_spec.rb | 28
-rw-r--r-- spec/lib/gitlab/health_checks/fs_shards_check_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/hook_data/issuable_builder_spec.rb | 110
-rw-r--r-- spec/lib/gitlab/hook_data/issue_builder_spec.rb | 45
-rw-r--r-- spec/lib/gitlab/hook_data/merge_request_builder_spec.rb | 61
-rw-r--r-- spec/lib/gitlab/import_export/all_models.yml | 12
-rw-r--r-- spec/lib/gitlab/import_export/fork_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/merge_request_parser_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/import_export/project.group.json | 188
-rw-r--r-- spec/lib/gitlab/import_export/project.json | 763
-rw-r--r-- spec/lib/gitlab/import_export/project.light.json | 51
-rw-r--r-- spec/lib/gitlab/import_export/project_tree_restorer_spec.rb | 166
-rw-r--r-- spec/lib/gitlab/import_export/project_tree_saver_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/import_export/safe_model_attributes.yml | 57
-rw-r--r-- spec/lib/gitlab/import_export/uploads_restorer_spec.rb | 55
-rw-r--r-- spec/lib/gitlab/import_export/uploads_saver_spec.rb | 61
-rw-r--r-- spec/lib/gitlab/import_sources_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/issuable_metadata_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/kubernetes/helm_spec.rb | 100
-rw-r--r-- spec/lib/gitlab/kubernetes/namespace_spec.rb | 66
-rw-r--r-- spec/lib/gitlab/ldap/authentication_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/ldap/user_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/branch_formatter_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/client_spec.rb | 97
-rw-r--r-- spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/comment_formatter_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/importer_spec.rb (renamed from spec/lib/gitlab/github_import/importer_spec.rb) | 28
-rw-r--r-- spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/issuable_formatter_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/issue_formatter_spec.rb) | 14
-rw-r--r-- spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/label_formatter_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/milestone_formatter_spec.rb) | 8
-rw-r--r-- spec/lib/gitlab/legacy_github_import/project_creator_spec.rb (renamed from spec/lib/gitlab/github_import/project_creator_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/pull_request_formatter_spec.rb) | 26
-rw-r--r-- spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/release_formatter_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/user_formatter_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/legacy_github_import/wiki_formatter_spec.rb (renamed from spec/lib/gitlab/github_import/wiki_formatter_spec.rb) | 4
-rw-r--r-- spec/lib/gitlab/metrics/background_transaction_spec.rb | 19
-rw-r--r-- spec/lib/gitlab/metrics/instrumentation_spec.rb | 3
-rw-r--r-- spec/lib/gitlab/metrics/method_call_spec.rb | 59
-rw-r--r-- spec/lib/gitlab/metrics/rack_middleware_spec.rb | 84
-rw-r--r-- spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb (renamed from spec/lib/gitlab/metrics/influx_sampler_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb | 90
-rw-r--r-- spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb (renamed from spec/lib/gitlab/metrics/unicorn_sampler_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/metrics/subscribers/action_view_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/metrics/subscribers/active_record_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb | 89
-rw-r--r-- spec/lib/gitlab/metrics/web_transaction_spec.rb (renamed from spec/lib/gitlab/metrics/transaction_spec.rb) | 75
-rw-r--r-- spec/lib/gitlab/metrics_spec.rb | 43
-rw-r--r-- spec/lib/gitlab/middleware/go_spec.rb | 144
-rw-r--r-- spec/lib/gitlab/middleware/rails_queue_duration_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/middleware/read_only_spec.rb | 42
-rw-r--r-- spec/lib/gitlab/multi_collection_paginator_spec.rb | 46
-rw-r--r-- spec/lib/gitlab/o_auth/user_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/path_regex_spec.rb | 65
-rw-r--r-- spec/lib/gitlab/project_search_results_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/project_template_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb | 81
-rw-r--r-- spec/lib/gitlab/saml/auth_hash_spec.rb | 40
-rw-r--r-- spec/lib/gitlab/saml/user_spec.rb | 34
-rw-r--r-- spec/lib/gitlab/shell_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb | 63
-rw-r--r-- spec/lib/gitlab/sidekiq_status_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/sql/pattern_spec.rb | 30
-rw-r--r-- spec/lib/gitlab/sql/union_spec.rb | 7
-rw-r--r-- spec/lib/gitlab/url_blocker_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/usage_data_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/utils/merge_hash_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/utils/strong_memoize_spec.rb | 52
-rw-r--r-- spec/lib/gitlab/workhorse_spec.rb | 88
-rw-r--r-- spec/lib/google_api/cloud_platform/client_spec.rb | 2
-rw-r--r-- spec/lib/milestone_array_spec.rb | 34
-rw-r--r-- spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb | 8
205 files changed, 9744 insertions, 1677 deletions
diff --git a/spec/lib/additional_email_headers_interceptor_spec.rb b/spec/lib/additional_email_headers_interceptor_spec.rb
index 580450eef1e..b5c1a360ba9 100644
--- a/spec/lib/additional_email_headers_interceptor_spec.rb
+++ b/spec/lib/additional_email_headers_interceptor_spec.rb
@@ -1,12 +1,29 @@
require 'spec_helper'
describe AdditionalEmailHeadersInterceptor do
- it 'adds Auto-Submitted header' do
- mail = ActionMailer::Base.mail(to: 'test@mail.com', from: 'info@mail.com', body: 'hello').deliver
+ let(:mail) do
+ ActionMailer::Base.mail(to: 'test@mail.com', from: 'info@mail.com', body: 'hello')
+ end
+
+ before do
+ mail.deliver_now
+ end
+ it 'adds Auto-Submitted header' do
expect(mail.header['To'].value).to eq('test@mail.com')
expect(mail.header['From'].value).to eq('info@mail.com')
expect(mail.header['Auto-Submitted'].value).to eq('auto-generated')
expect(mail.header['X-Auto-Response-Suppress'].value).to eq('All')
end
+
+ context 'when the same mail object is sent twice' do
+ before do
+ mail.deliver_now
+ end
+
+ it 'does not add the Auto-Submitted header twice' do
+ expect(mail.header['Auto-Submitted'].value).to eq('auto-generated')
+ expect(mail.header['X-Auto-Response-Suppress'].value).to eq('All')
+ end
+ end
end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
new file mode 100644
index 00000000000..3c4deba4712
--- /dev/null
+++ b/spec/lib/api/helpers_spec.rb
@@ -0,0 +1,109 @@
+require 'spec_helper'
+
+describe API::Helpers do
+ subject { Class.new.include(described_class).new }
+
+ describe '#find_namespace' do
+ let(:namespace) { create(:namespace) }
+
+ shared_examples 'namespace finder' do
+ context 'when namespace exists' do
+ it 'returns requested namespace' do
+ expect(subject.find_namespace(existing_id)).to eq(namespace)
+ end
+ end
+
+ context "when namespace doesn't exists" do
+ it 'returns nil' do
+ expect(subject.find_namespace(non_existing_id)).to be_nil
+ end
+ end
+ end
+
+ context 'when ID is used as an argument' do
+ let(:existing_id) { namespace.id }
+ let(:non_existing_id) { 9999 }
+
+ it_behaves_like 'namespace finder'
+ end
+
+ context 'when PATH is used as an argument' do
+ let(:existing_id) { namespace.path }
+ let(:non_existing_id) { 'non-existing-path' }
+
+ it_behaves_like 'namespace finder'
+ end
+ end
+
+ shared_examples 'user namespace finder' do
+ let(:user1) { create(:user) }
+
+ before do
+ allow(subject).to receive(:current_user).and_return(user1)
+ allow(subject).to receive(:header).and_return(nil)
+ allow(subject).to receive(:not_found!).and_raise('404 Namespace not found')
+ end
+
+ context 'when namespace is group' do
+ let(:namespace) { create(:group) }
+
+ context 'when user has access to group' do
+ before do
+ namespace.add_guest(user1)
+ namespace.save!
+ end
+
+ it 'returns requested namespace' do
+ expect(namespace_finder).to eq(namespace)
+ end
+ end
+
+ context "when user doesn't have access to group" do
+ it 'raises not found error' do
+ expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found')
+ end
+ end
+ end
+
+ context "when namespace is user's personal namespace" do
+ let(:namespace) { create(:namespace) }
+
+ context 'when user owns the namespace' do
+ before do
+ namespace.owner = user1
+ namespace.save!
+ end
+
+ it 'returns requested namespace' do
+ expect(namespace_finder).to eq(namespace)
+ end
+ end
+
+ context "when user doesn't own the namespace" do
+ it 'raises not found error' do
+ expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found')
+ end
+ end
+ end
+ end
+
+ describe '#find_namespace!' do
+ let(:namespace_finder) do
+ subject.find_namespace!(namespace.id)
+ end
+
+ it_behaves_like 'user namespace finder'
+ end
+
+ describe '#user_namespace' do
+ let(:namespace_finder) do
+ subject.user_namespace
+ end
+
+ before do
+ allow(subject).to receive(:params).and_return({ id: namespace.id })
+ end
+
+ it_behaves_like 'user namespace finder'
+ end
+end
diff --git a/spec/lib/banzai/commit_renderer_spec.rb b/spec/lib/banzai/commit_renderer_spec.rb
index 049d025a5b9..84adaebdcbe 100644
--- a/spec/lib/banzai/commit_renderer_spec.rb
+++ b/spec/lib/banzai/commit_renderer_spec.rb
@@ -10,7 +10,7 @@ describe Banzai::CommitRenderer do
described_class::ATTRIBUTES.each do |attr|
expect_any_instance_of(Banzai::ObjectRenderer).to receive(:render).with([project.commit], attr).once.and_call_original
- expect(Banzai::Renderer).to receive(:cacheless_render_field).with(project.commit, attr)
+ expect(Banzai::Renderer).to receive(:cacheless_render_field).with(project.commit, attr, {})
end
described_class.render([project.commit], project, user)
diff --git a/spec/lib/banzai/filter/absolute_link_filter_spec.rb b/spec/lib/banzai/filter/absolute_link_filter_spec.rb
new file mode 100644
index 00000000000..a3ad056efcd
--- /dev/null
+++ b/spec/lib/banzai/filter/absolute_link_filter_spec.rb
@@ -0,0 +1,58 @@
+require 'spec_helper'
+
+describe Banzai::Filter::AbsoluteLinkFilter do
+ def filter(doc, context = {})
+ described_class.call(doc, context)
+ end
+
+ context 'with html links' do
+ context 'if only_path is false' do
+ let(:only_path_context) do
+ { only_path: false }
+ end
+ let(:fake_url) { 'http://www.example.com' }
+
+ before do
+ allow(Gitlab.config.gitlab).to receive(:url).and_return(fake_url)
+ end
+
+ context 'has the .gfm class' do
+ it 'converts a relative url into absolute' do
+ doc = filter(link('/foo', 'gfm'), only_path_context)
+ expect(doc.at_css('a')['href']).to eq "#{fake_url}/foo"
+ end
+
+ it 'does not change the url if it already absolute' do
+ doc = filter(link("#{fake_url}/foo", 'gfm'), only_path_context)
+ expect(doc.at_css('a')['href']).to eq "#{fake_url}/foo"
+ end
+
+ context 'if relative_url_root is set' do
+ it 'joins the url without without doubling the path' do
+ allow(Gitlab.config.gitlab).to receive(:url).and_return("#{fake_url}/gitlab/")
+ doc = filter(link("/gitlab/foo", 'gfm'), only_path_context)
+ expect(doc.at_css('a')['href']).to eq "#{fake_url}/gitlab/foo"
+ end
+ end
+ end
+
+ context 'has not the .gfm class' do
+ it 'does not convert a relative url into absolute' do
+ doc = filter(link('/foo'), only_path_context)
+ expect(doc.at_css('a')['href']).to eq '/foo'
+ end
+ end
+ end
+
+ context 'if only_path is not false' do
+ it 'does not convert a relative url into absolute' do
+ expect(filter(link('/foo', 'gfm')).at_css('a')['href']).to eq '/foo'
+ expect(filter(link('/foo')).at_css('a')['href']).to eq '/foo'
+ end
+ end
+ end
+
+ def link(path, css_class = '')
+ %(<a class="#{css_class}" href="#{path}">example</a>)
+ end
+end
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index 97d612e6347..ca76d6f0881 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -15,9 +15,13 @@ describe Banzai::Filter::GollumTagsFilter do
context 'linking internal images' do
it 'creates img tag if image exists' do
- file = Gollum::File.new(project_wiki.wiki)
- expect(file).to receive(:path).and_return('images/image.jpg')
- expect(project_wiki).to receive(:find_file).with('images/image.jpg').and_return(file)
+ gollum_file_double = double('Gollum::File',
+ mime_type: 'image/jpeg',
+ name: 'images/image.jpg',
+ path: 'images/image.jpg',
+ raw_data: '')
+ wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
+ expect(project_wiki).to receive(:find_file).with('images/image.jpg').and_return(wiki_file)
tag = '[[images/image.jpg]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index 9c74c9b8c99..f70c69ef588 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -317,6 +317,76 @@ describe Banzai::Filter::IssueReferenceFilter do
end
end
+ context 'group context' do
+ let(:group) { create(:group) }
+ let(:context) { { project: nil, group: group } }
+
+ it 'ignores shorthanded issue reference' do
+ reference = "##{issue.iid}"
+ text = "Fixed #{reference}"
+
+ expect(reference_filter(text, context).to_html).to eq(text)
+ end
+
+ it 'ignores valid references when cross-reference project uses external tracker' do
+ expect_any_instance_of(described_class).to receive(:find_object)
+ .with(project, issue.iid)
+ .and_return(nil)
+
+ reference = "#{project.full_path}##{issue.iid}"
+ text = "Issue #{reference}"
+
+ expect(reference_filter(text, context).to_html).to eq(text)
+ end
+
+ it 'links to a valid reference for complete cross-reference' do
+ reference = "#{project.full_path}##{issue.iid}"
+ doc = reference_filter("See #{reference}", context)
+
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project))
+ expect(link.text).to include("#{project.full_path}##{issue.iid}")
+ end
+
+ it 'ignores reference for shorthand cross-reference' do
+ reference = "#{project.path}##{issue.iid}"
+ text = "See #{reference}"
+
+ expect(reference_filter(text, context).to_html).to eq(text)
+ end
+
+ it 'links to a valid reference for url cross-reference' do
+ reference = helper.url_for_issue(issue.iid, project) + "#note_123"
+
+ doc = reference_filter("See #{reference}", context)
+
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ expect(link.text).to include("#{project.full_path}##{issue.iid}")
+ end
+
+ it 'links to a valid reference for cross-reference in link href' do
+ reference = "#{helper.url_for_issue(issue.iid, project) + "#note_123"}"
+ reference_link = %{<a href="#{reference}">Reference</a>}
+
+ doc = reference_filter("See #{reference_link}", context)
+
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ expect(link.text).to include('Reference')
+ end
+
+ it 'links to a valid reference for issue reference in the link href' do
+ reference = issue.to_reference(group)
+ reference_link = %{<a href="#{reference}">Reference</a>}
+ doc = reference_filter("See #{reference_link}", context)
+
+ link = doc.css('a').first
+ expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project))
+ expect(link.text).to include('Reference')
+ end
+ end
+
describe '#issues_per_project' do
context 'using an internal issue tracker' do
it 'returns a Hash containing the issues per project' do
diff --git a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/label_reference_filter_spec.rb
index 2cd30a5e302..862b1fe3fd3 100644
--- a/spec/lib/banzai/filter/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/label_reference_filter_spec.rb
@@ -594,4 +594,16 @@ describe Banzai::Filter::LabelReferenceFilter do
expect(reference_filter(act).to_html).to eq exp
end
end
+
+ describe 'group context' do
+ it 'points to referenced project issues page' do
+ project = create(:project)
+ label = create(:label, project: project)
+ reference = "#{project.full_path}~#{label.name}"
+
+ result = reference_filter("See #{reference}", { project: nil, group: create(:group) } )
+
+ expect(result.css('a').first.attr('href')).to eq(urls.project_issues_url(project, label_name: label.name))
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
index ed2788f8a33..158844e25ae 100644
--- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
@@ -214,4 +214,14 @@ describe Banzai::Filter::MergeRequestReferenceFilter do
expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(merge.to_reference(project))} \(diffs, comment 123\)<\/a>\.\)/)
end
end
+
+ context 'group context' do
+ it 'links to a valid reference' do
+ reference = "#{project.full_path}!#{merge.iid}"
+
+ result = reference_filter("See #{reference}", { project: nil, group: create(:group) } )
+
+ expect(result.css('a').first.attr('href')).to eq(urls.project_merge_request_url(project, merge))
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/mermaid_filter_spec.rb b/spec/lib/banzai/filter/mermaid_filter_spec.rb
new file mode 100644
index 00000000000..532d25e121d
--- /dev/null
+++ b/spec/lib/banzai/filter/mermaid_filter_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe Banzai::Filter::MermaidFilter do
+ include FilterSpecHelper
+
+ it 'adds `js-render-mermaid` class to the `pre` tag' do
+ doc = filter("<pre class='code highlight js-syntax-highlight mermaid' lang='mermaid' v-pre='true'><code>graph TD;\n A--&gt;B;\n</code></pre>")
+ result = doc.xpath('descendant-or-self::pre').first
+
+ expect(result[:class]).to include('js-render-mermaid')
+ end
+end
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
index fe7a8c84c9e..6a9087d2e59 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
@@ -294,8 +294,7 @@ describe Banzai::Filter::MilestoneReferenceFilter do
end
end
- context 'project milestones' do
- let(:milestone) { create(:milestone, project: project) }
+ shared_context 'project milestones' do
let(:reference) { milestone.to_reference(format: :iid) }
include_examples 'reference parsing'
@@ -309,8 +308,7 @@ describe Banzai::Filter::MilestoneReferenceFilter do
it_behaves_like 'cross project shorthand reference'
end
- context 'group milestones' do
- let(:milestone) { create(:milestone, group: group) }
+ shared_context 'group milestones' do
let(:reference) { milestone.to_reference(format: :name) }
include_examples 'reference parsing'
@@ -343,4 +341,43 @@ describe Banzai::Filter::MilestoneReferenceFilter do
expect(doc.css('a')).to be_empty
end
end
+
+ context 'group context' do
+ it 'links to a valid reference' do
+ milestone = create(:milestone, project: project)
+ reference = "#{project.full_path}%#{milestone.iid}"
+
+ result = reference_filter("See #{reference}", { project: nil, group: create(:group) } )
+
+ expect(result.css('a').first.attr('href')).to eq(urls.milestone_url(milestone))
+ end
+ end
+
+ context 'when milestone is open' do
+ context 'project milestones' do
+ let(:milestone) { create(:milestone, project: project) }
+
+ include_context 'project milestones'
+ end
+
+ context 'group milestones' do
+ let(:milestone) { create(:milestone, group: group) }
+
+ include_context 'group milestones'
+ end
+ end
+
+ context 'when milestone is closed' do
+ context 'project milestones' do
+ let(:milestone) { create(:milestone, :closed, project: project) }
+
+ include_context 'project milestones'
+ end
+
+ context 'group milestones' do
+ let(:milestone) { create(:milestone, :closed, group: group) }
+
+ include_context 'group milestones'
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 5f41e28fece..17a620ef603 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -217,6 +217,11 @@ describe Banzai::Filter::SanitizationFilter do
output: '<img>'
},
+ 'protocol-based JS injection: Unicode' => {
+ input: %Q(<a href="\u0001java\u0003script:alert('XSS')">foo</a>),
+ output: '<a>foo</a>'
+ },
+
'protocol-based JS injection: spaces and entities' => {
input: '<a href=" &#14; javascript:alert(\'XSS\');">foo</a>',
output: '<a href="">foo</a>'
diff --git a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb
index 90ac4c7b238..3a07a6dc179 100644
--- a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb
@@ -201,4 +201,14 @@ describe Banzai::Filter::SnippetReferenceFilter do
expect(reference_filter(act).to_html).to match(/<a.+>#{Regexp.escape(invalidate_reference(reference))}<\/a>/)
end
end
+
+ context 'group context' do
+ it 'links to a valid reference' do
+ reference = "#{project.full_path}$#{snippet.id}"
+
+ result = reference_filter("See #{reference}", { project: nil, group: create(:group) } )
+
+ expect(result.css('a').first.attr('href')).to eq(urls.project_snippet_url(project, snippet))
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 5a23e0e70cc..9f2efa05a01 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -31,7 +31,7 @@ describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code lang="ruby">This is a test</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight" lang="" v-pre="true"><code>This is a test</code></pre>')
+ expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code>This is a test</code></pre>')
end
end
end
diff --git a/spec/lib/banzai/filter/user_reference_filter_spec.rb b/spec/lib/banzai/filter/user_reference_filter_spec.rb
index 34dac1db69a..fc03741976e 100644
--- a/spec/lib/banzai/filter/user_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/user_reference_filter_spec.rb
@@ -208,6 +208,39 @@ describe Banzai::Filter::UserReferenceFilter do
end
end
+ context 'in group context' do
+ let(:group) { create(:group) }
+ let(:group_member) { create(:user) }
+
+ before do
+ group.add_developer(group_member)
+ end
+
+ let(:context) { { author: group_member, project: nil, group: group } }
+
+ it 'supports a special @all mention' do
+ reference = User.reference_prefix + 'all'
+ doc = reference_filter("Hey #{reference}", context)
+
+ expect(doc.css('a').length).to eq(1)
+ expect(doc.css('a').first.attr('href')).to eq urls.group_url(group)
+ end
+
+ it 'supports mentioning a single user' do
+ reference = group_member.to_reference
+ doc = reference_filter("Hey #{reference}", context)
+
+ expect(doc.css('a').first.attr('href')).to eq urls.user_url(group_member)
+ end
+
+ it 'supports mentioning a group' do
+ reference = group.to_reference
+ doc = reference_filter("Hey #{reference}", context)
+
+ expect(doc.css('a').first.attr('href')).to eq urls.user_url(group)
+ end
+ end
+
describe '#namespaces' do
it 'returns a Hash containing all Namespaces' do
document = Nokogiri::HTML.fragment("<p>#{user.to_reference}</p>")
diff --git a/spec/lib/banzai/note_renderer_spec.rb b/spec/lib/banzai/note_renderer_spec.rb
deleted file mode 100644
index 32764bee5eb..00000000000
--- a/spec/lib/banzai/note_renderer_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-require 'spec_helper'
-
-describe Banzai::NoteRenderer do
- describe '.render' do
- it 'renders a Note' do
- note = double(:note)
- project = double(:project)
- wiki = double(:wiki)
- user = double(:user)
-
- expect(Banzai::ObjectRenderer).to receive(:new)
- .with(project, user,
- requested_path: 'foo',
- project_wiki: wiki,
- ref: 'bar')
- .and_call_original
-
- expect_any_instance_of(Banzai::ObjectRenderer)
- .to receive(:render).with([note], :note)
-
- described_class.render([note], project, user, 'foo', wiki, 'bar')
- end
- end
-end
diff --git a/spec/lib/banzai/object_renderer_spec.rb b/spec/lib/banzai/object_renderer_spec.rb
index b172a1b718c..074d521a5c6 100644
--- a/spec/lib/banzai/object_renderer_spec.rb
+++ b/spec/lib/banzai/object_renderer_spec.rb
@@ -22,7 +22,7 @@ describe Banzai::ObjectRenderer do
end
it 'retrieves field content using Banzai::Renderer.render_field' do
- expect(Banzai::Renderer).to receive(:render_field).with(object, :note).and_call_original
+ expect(Banzai::Renderer).to receive(:render_field).with(object, :note, {}).and_call_original
renderer.render([object], :note)
end
@@ -68,7 +68,7 @@ describe Banzai::ObjectRenderer do
end
it 'retrieves field content using Banzai::Renderer.cacheless_render_field' do
- expect(Banzai::Renderer).to receive(:cacheless_render_field).with(commit, :title).and_call_original
+ expect(Banzai::Renderer).to receive(:cacheless_render_field).with(commit, :title, {}).and_call_original
renderer.render([commit], :title)
end
diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb
index 81a04a2d46d..650cecfc778 100644
--- a/spec/lib/banzai/renderer_spec.rb
+++ b/spec/lib/banzai/renderer_spec.rb
@@ -18,7 +18,7 @@ describe Banzai::Renderer do
let(:commit) { create(:project, :repository).commit }
it 'returns cacheless render field' do
- expect(renderer).to receive(:cacheless_render_field).with(commit, :title)
+ expect(renderer).to receive(:cacheless_render_field).with(commit, :title, {})
renderer.render_field(commit, :title)
end
diff --git a/spec/lib/container_registry/path_spec.rb b/spec/lib/container_registry/path_spec.rb
index 84cacdd3f0d..010deae822c 100644
--- a/spec/lib/container_registry/path_spec.rb
+++ b/spec/lib/container_registry/path_spec.rb
@@ -86,6 +86,24 @@ describe ContainerRegistry::Path do
it { is_expected.to be_valid }
end
+
+ context 'when path contains double underscore' do
+ let(:path) { 'my/repository__name' }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when path contains invalid separator with dot' do
+ let(:path) { 'some/registry-.name' }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when path contains invalid separator with underscore' do
+ let(:path) { 'some/registry._name' }
+
+ it { is_expected.not_to be_valid }
+ end
end
describe '#has_repository?' do
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 1076c63b5f2..10020511bf8 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -13,6 +13,47 @@ describe Feature do
end
end
+ describe '.persisted_names' do
+ it 'returns the names of the persisted features' do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ expect(described_class.persisted_names).to eq(%w[foo])
+ end
+
+ it 'returns an empty Array when no features are presisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
+
+ it 'caches the feature names when request store is active', :request_store do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ expect(Feature::FlipperFeature)
+ .to receive(:feature_names)
+ .once
+ .and_call_original
+
+ 2.times do
+ expect(described_class.persisted_names).to eq(%w[foo])
+ end
+ end
+ end
+
+ describe '.persisted?' do
+ it 'returns true for a persisted feature' do
+ Feature::FlipperFeature.create!(key: 'foo')
+
+ feature = double(:feature, name: 'foo')
+
+ expect(described_class.persisted?(feature)).to eq(true)
+ end
+
+ it 'returns false for a feature that is not persisted' do
+ feature = double(:feature, name: 'foo')
+
+ expect(described_class.persisted?(feature)).to eq(false)
+ end
+ end
+
describe '.all' do
let(:features) { Set.new }
diff --git a/spec/lib/github/client_spec.rb b/spec/lib/github/client_spec.rb
deleted file mode 100644
index b846096fe25..00000000000
--- a/spec/lib/github/client_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-require 'spec_helper'
-
-describe Github::Client do
- let(:connection) { spy }
- let(:rate_limit) { double(get: [false, 1]) }
- let(:client) { described_class.new({}) }
- let(:results) { double }
- let(:response) { double }
-
- before do
- allow(Faraday).to receive(:new).and_return(connection)
- allow(Github::RateLimit).to receive(:new).with(connection).and_return(rate_limit)
- end
-
- describe '#get' do
- before do
- allow(Github::Response).to receive(:new).with(results).and_return(response)
- end
-
- it 'uses a default per_page param' do
- expect(connection).to receive(:get).with('/foo', per_page: 100).and_return(results)
-
- expect(client.get('/foo')).to eq(response)
- end
-
- context 'with per_page given' do
- it 'overwrites the default per_page' do
- expect(connection).to receive(:get).with('/foo', per_page: 30).and_return(results)
-
- expect(client.get('/foo', per_page: 30)).to eq(response)
- end
- end
- end
-end
diff --git a/spec/lib/github/import/legacy_diff_note_spec.rb b/spec/lib/github/import/legacy_diff_note_spec.rb
deleted file mode 100644
index 8c50b46cacb..00000000000
--- a/spec/lib/github/import/legacy_diff_note_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-require 'spec_helper'
-
-describe Github::Import::LegacyDiffNote do
- describe '#type' do
- it 'returns the original note type' do
- expect(described_class.new.type).to eq('LegacyDiffNote')
- end
- end
-end
diff --git a/spec/lib/github/import/note_spec.rb b/spec/lib/github/import/note_spec.rb
deleted file mode 100644
index fcdccd9e097..00000000000
--- a/spec/lib/github/import/note_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-require 'spec_helper'
-
-describe Github::Import::Note do
- describe '#type' do
- it 'returns the original note type' do
- expect(described_class.new.type).to eq('Note')
- end
- end
-end
diff --git a/spec/lib/gitlab/app_logger_spec.rb b/spec/lib/gitlab/app_logger_spec.rb
new file mode 100644
index 00000000000..c86d30ce6df
--- /dev/null
+++ b/spec/lib/gitlab/app_logger_spec.rb
@@ -0,0 +1,12 @@
+require 'spec_helper'
+
+describe Gitlab::AppLogger, :request_store do
+ subject { described_class }
+
+ it 'builds a logger once' do
+ expect(::Logger).to receive(:new).and_call_original
+
+ subject.info('hello world')
+ subject.error('hello again')
+ end
+end
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
new file mode 100644
index 00000000000..ffcd90b9fcb
--- /dev/null
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe Gitlab::Auth::RequestAuthenticator do
+ let(:env) do
+ {
+ 'rack.input' => '',
+ 'REQUEST_METHOD' => 'GET'
+ }
+ end
+ let(:request) { ActionDispatch::Request.new(env) }
+
+ subject { described_class.new(request) }
+
+ describe '#user' do
+ let!(:sessionless_user) { build(:user) }
+ let!(:session_user) { build(:user) }
+
+ it 'returns sessionless user first' do
+ allow_any_instance_of(described_class).to receive(:find_sessionless_user).and_return(sessionless_user)
+ allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_return(session_user)
+
+ expect(subject.user).to eq sessionless_user
+ end
+
+ it 'returns session user if no sessionless user found' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_return(session_user)
+
+ expect(subject.user).to eq session_user
+ end
+
+ it 'returns nil if no user found' do
+ expect(subject.user).to be_blank
+ end
+
+ it 'bubbles up exceptions' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_warden).and_raise(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ describe '#find_sessionless_user' do
+ let!(:access_token_user) { build(:user) }
+ let!(:rss_token_user) { build(:user) }
+
+ it 'returns access_token user first' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_access_token).and_return(access_token_user)
+ allow_any_instance_of(described_class).to receive(:find_user_from_rss_token).and_return(rss_token_user)
+
+ expect(subject.find_sessionless_user).to eq access_token_user
+ end
+
+ it 'returns rss_token user if no access_token user found' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_rss_token).and_return(rss_token_user)
+
+ expect(subject.find_sessionless_user).to eq rss_token_user
+ end
+
+ it 'returns nil if no user found' do
+ expect(subject.find_sessionless_user).to be_blank
+ end
+
+ it 'rescue Gitlab::Auth::AuthenticationError exceptions' do
+ allow_any_instance_of(described_class).to receive(:find_user_from_access_token).and_raise(Gitlab::Auth::UnauthorizedError)
+
+ expect(subject.find_sessionless_user).to be_blank
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/user_auth_finders_spec.rb b/spec/lib/gitlab/auth/user_auth_finders_spec.rb
new file mode 100644
index 00000000000..4637816570c
--- /dev/null
+++ b/spec/lib/gitlab/auth/user_auth_finders_spec.rb
@@ -0,0 +1,194 @@
+require 'spec_helper'
+
+describe Gitlab::Auth::UserAuthFinders do
+ include described_class
+
+ let(:user) { create(:user) }
+ let(:env) do
+ {
+ 'rack.input' => ''
+ }
+ end
+ let(:request) { Rack::Request.new(env)}
+
+ def set_param(key, value)
+ request.update_param(key, value)
+ end
+
+ describe '#find_user_from_warden' do
+ context 'with CSRF token' do
+ before do
+ allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(true)
+ end
+
+ context 'with invalid credentials' do
+ it 'returns nil' do
+ expect(find_user_from_warden).to be_nil
+ end
+ end
+
+ context 'with valid credentials' do
+ it 'returns the user' do
+ env['warden'] = double("warden", authenticate: user)
+
+ expect(find_user_from_warden).to eq user
+ end
+ end
+ end
+
+ context 'without CSRF token' do
+ it 'returns nil' do
+ allow(Gitlab::RequestForgeryProtection).to receive(:verified?).and_return(false)
+ env['warden'] = double("warden", authenticate: user)
+
+ expect(find_user_from_warden).to be_nil
+ end
+ end
+ end
+
+ describe '#find_user_from_rss_token' do
+ context 'when the request format is atom' do
+ before do
+ env['HTTP_ACCEPT'] = 'application/atom+xml'
+ end
+
+ it 'returns user if valid rss_token' do
+ set_param(:rss_token, user.rss_token)
+
+ expect(find_user_from_rss_token).to eq user
+ end
+
+ it 'returns nil if rss_token is blank' do
+ expect(find_user_from_rss_token).to be_nil
+ end
+
+ it 'returns exception if invalid rss_token' do
+ set_param(:rss_token, 'invalid_token')
+
+ expect { find_user_from_rss_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ context 'when the request format is not atom' do
+ it 'returns nil' do
+ set_param(:rss_token, user.rss_token)
+
+ expect(find_user_from_rss_token).to be_nil
+ end
+ end
+ end
+
+ describe '#find_user_from_access_token' do
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ it 'returns nil if no access_token present' do
+ expect(find_personal_access_token).to be_nil
+ end
+
+ context 'when validate_access_token! returns valid' do
+ it 'returns user' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+
+ expect(find_user_from_access_token).to eq user
+ end
+
+ it 'returns exception if token has no user' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+ allow_any_instance_of(PersonalAccessToken).to receive(:user).and_return(nil)
+
+ expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+ end
+
+ describe '#find_personal_access_token' do
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ context 'passed as header' do
+ it 'returns token if valid personal_access_token' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = personal_access_token.token
+
+ expect(find_personal_access_token).to eq personal_access_token
+ end
+ end
+
+ context 'passed as param' do
+ it 'returns token if valid personal_access_token' do
+ set_param(Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_PARAM, personal_access_token.token)
+
+ expect(find_personal_access_token).to eq personal_access_token
+ end
+ end
+
+ it 'returns nil if no personal_access_token' do
+ expect(find_personal_access_token).to be_nil
+ end
+
+ it 'returns exception if invalid personal_access_token' do
+ env[Gitlab::Auth::UserAuthFinders::PRIVATE_TOKEN_HEADER] = 'invalid_token'
+
+ expect { find_personal_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ describe '#find_oauth_access_token' do
+ let(:application) { Doorkeeper::Application.create!(name: 'MyApp', redirect_uri: 'https://app.com', owner: user) }
+ let(:token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') }
+
+ context 'passed as header' do
+ it 'returns token if valid oauth_access_token' do
+ env['HTTP_AUTHORIZATION'] = "Bearer #{token.token}"
+
+ expect(find_oauth_access_token.token).to eq token.token
+ end
+ end
+
+ context 'passed as param' do
+ it 'returns user if valid oauth_access_token' do
+ set_param(:access_token, token.token)
+
+ expect(find_oauth_access_token.token).to eq token.token
+ end
+ end
+
+ it 'returns nil if no oauth_access_token' do
+ expect(find_oauth_access_token).to be_nil
+ end
+
+ it 'returns exception if invalid oauth_access_token' do
+ env['HTTP_AUTHORIZATION'] = "Bearer invalid_token"
+
+ expect { find_oauth_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
+ end
+ end
+
+ describe '#validate_access_token!' do
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ it 'returns nil if no access_token present' do
+ expect(validate_access_token!).to be_nil
+ end
+
+ context 'token is not valid' do
+ before do
+ allow_any_instance_of(described_class).to receive(:access_token).and_return(personal_access_token)
+ end
+
+ it 'returns Gitlab::Auth::ExpiredError if token expired' do
+ personal_access_token.expires_at = 1.day.ago
+
+ expect { validate_access_token! }.to raise_error(Gitlab::Auth::ExpiredError)
+ end
+
+ it 'returns Gitlab::Auth::RevokedError if token revoked' do
+ personal_access_token.revoke!
+
+ expect { validate_access_token! }.to raise_error(Gitlab::Auth::RevokedError)
+ end
+
+ it 'returns Gitlab::Auth::InsufficientScopeError if invalid token scope' do
+ expect { validate_access_token!(scopes: [:sudo]) }.to raise_error(Gitlab::Auth::InsufficientScopeError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index af1db2c3455..a6fbec295b5 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -5,7 +5,7 @@ describe Gitlab::Auth do
describe 'constants' do
it 'API_SCOPES contains all scopes for API access' do
- expect(subject::API_SCOPES).to eq [:api, :read_user]
+ expect(subject::API_SCOPES).to eq %i[api read_user sudo]
end
it 'OPENID_SCOPES contains all scopes for OpenID Connect' do
@@ -19,7 +19,7 @@ describe Gitlab::Auth do
it 'optional_scopes contains all non-default scopes' do
stub_container_registry_config(enabled: true)
- expect(subject.optional_scopes).to eq %i[read_user read_registry openid]
+ expect(subject.optional_scopes).to eq %i[read_user sudo read_registry openid]
end
context 'registry_scopes' do
@@ -133,6 +133,25 @@ describe Gitlab::Auth do
gl_auth.find_for_git_client(user.username, token, project: nil, ip: 'ip')
end
+
+ it 'grants deploy key write permissions' do
+ project = create(:project)
+ key = create(:deploy_key, can_push: true)
+ create(:deploy_keys_project, deploy_key: key, project: project)
+ token = Gitlab::LfsToken.new(key).token
+
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: "lfs+deploy-key-#{key.id}")
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, read_write_authentication_abilities))
+ end
+
+ it 'does not grant deploy key write permissions' do
+ project = create(:project)
+ key = create(:deploy_key, can_push: true)
+ token = Gitlab::LfsToken.new(key).token
+
+ expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: "lfs+deploy-key-#{key.id}")
+ expect(gl_auth.find_for_git_client("lfs+deploy-key-#{key.id}", token, project: project, ip: 'ip')).to eq(Gitlab::Auth::Result.new(key, nil, :lfs_deploy_token, read_authentication_abilities))
+ end
end
context 'while using OAuth tokens as passwords' do
@@ -164,7 +183,7 @@ describe Gitlab::Auth do
personal_access_token = create(:personal_access_token, scopes: ['api'])
expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
- expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_token, full_authentication_abilities))
+ expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_access_token, full_authentication_abilities))
end
context 'when registry is enabled' do
@@ -176,7 +195,7 @@ describe Gitlab::Auth do
personal_access_token = create(:personal_access_token, scopes: ['read_registry'])
expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
- expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_token, [:read_container_image]))
+ expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_access_token, [:read_container_image]))
end
end
@@ -184,14 +203,14 @@ describe Gitlab::Auth do
impersonation_token = create(:personal_access_token, :impersonation, scopes: ['api'])
expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
- expect(gl_auth.find_for_git_client('', impersonation_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(impersonation_token.user, nil, :personal_token, full_authentication_abilities))
+ expect(gl_auth.find_for_git_client('', impersonation_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(impersonation_token.user, nil, :personal_access_token, full_authentication_abilities))
end
it 'limits abilities based on scope' do
- personal_access_token = create(:personal_access_token, scopes: ['read_user'])
+ personal_access_token = create(:personal_access_token, scopes: %w[read_user sudo])
expect(gl_auth).to receive(:rate_limit!).with('ip', success: true, login: '')
- expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_token, []))
+ expect(gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip')).to eq(Gitlab::Auth::Result.new(personal_access_token.user, nil, :personal_access_token, []))
end
it 'fails if password is nil' do
@@ -232,9 +251,9 @@ describe Gitlab::Auth do
end
it 'throws an error suggesting user create a PAT when internal auth is disabled' do
- allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled?) { false }
+ allow_any_instance_of(ApplicationSetting).to receive(:password_authentication_enabled_for_git?) { false }
- expect { gl_auth.find_for_git_client('foo', 'bar', project: nil, ip: 'ip') }.to raise_error(Gitlab::Auth::MissingPersonalTokenError)
+ expect { gl_auth.find_for_git_client('foo', 'bar', project: nil, ip: 'ip') }.to raise_error(Gitlab::Auth::MissingPersonalAccessTokenError)
end
end
@@ -305,6 +324,26 @@ describe Gitlab::Auth do
gl_auth.find_with_user_password('ldap_user', 'password')
end
end
+
+ context "with password authentication disabled for Git" do
+ before do
+ stub_application_setting(password_authentication_enabled_for_git: false)
+ end
+
+ it "does not find user by valid login/password" do
+ expect(gl_auth.find_with_user_password(username, password)).to be_nil
+ end
+
+ context "with ldap enabled" do
+ before do
+ allow(Gitlab::LDAP::Config).to receive(:enabled?).and_return(true)
+ end
+
+ it "does not find non-ldap user by valid login/password" do
+ expect(gl_auth.find_with_user_password(username, password)).to be_nil
+ end
+ end
+ end
end
private
@@ -326,10 +365,15 @@ describe Gitlab::Auth do
]
end
- def full_authentication_abilities
+ def read_write_authentication_abilities
read_authentication_abilities + [
:push_code,
- :create_container_image,
+ :create_container_image
+ ]
+ end
+
+ def full_authentication_abilities
+ read_write_authentication_abilities + [
:admin_container_image
]
end
diff --git a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
index 1a4ea2bac48..79d2c071446 100644
--- a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
+++ b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
@@ -93,7 +93,14 @@ describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migrat
end
it 'knows it is finished for this range' do
- expect(migration.missing_members?(1, 7)).to be_falsy
+ expect(migration.missing_members?(1, 8)).to be_falsy
+ end
+
+ it 'does not miss members for forks of forks for which the root was deleted' do
+ forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: create(:project).id)
+ base1.destroy
+
+ expect(migration.missing_members?(7, 10)).to be_falsy
end
context 'with more forks' do
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
index d2e7243ee05..84d9e635810 100644
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
@@ -1,9 +1,13 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :truncate do
+describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :truncate, :migration, schema: 20171114162227 do
+ let(:merge_request_diffs) { table(:merge_request_diffs) }
+ let(:merge_requests) { table(:merge_requests) }
+
describe '#perform' do
- let(:merge_request) { create(:merge_request) }
- let(:merge_request_diff) { merge_request.merge_request_diff }
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) { merge_requests.create!(iid: 1, target_project_id: project.id, source_project_id: project.id, target_branch: 'feature', source_branch: 'master').becomes(MergeRequest) }
+ let(:merge_request_diff) { MergeRequest.find(merge_request.id).create_merge_request_diff }
let(:updated_merge_request_diff) { MergeRequestDiff.find(merge_request_diff.id) }
def diffs_to_hashes(diffs)
@@ -31,8 +35,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
end
it 'creates correct entries in the merge_request_diff_commits table' do
- expect(updated_merge_request_diff.merge_request_diff_commits.count).to eq(commits.count)
- expect(updated_merge_request_diff.commits.map(&:to_hash)).to eq(commits)
+ expect(updated_merge_request_diff.merge_request_diff_commits.count).to eq(expected_commits.count)
+ expect(updated_merge_request_diff.commits.map(&:to_hash)).to eq(expected_commits)
end
it 'creates correct entries in the merge_request_diff_files table' do
@@ -68,7 +72,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
let(:stop_id) { described_class::MergeRequestDiff.maximum(:id) }
before do
- merge_request.reload_diff(true)
+ merge_request.create_merge_request_diff
convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
@@ -199,6 +203,16 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diff has valid commits and diffs' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
+ let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
+ let(:expected_diffs) { diffs }
+
+ include_examples 'updated MR diff'
+ end
+
+ context 'when the merge request diff has diffs but no commits' do
+ let(:commits) { nil }
+ let(:expected_commits) { [] }
let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:expected_diffs) { diffs }
@@ -207,6 +221,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs do not have too_large set' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:diffs) do
@@ -218,6 +233,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs do not have a_mode and b_mode set' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:diffs) do
@@ -229,6 +245,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs have binary content' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:expected_diffs) { diffs }
# The start of a PDF created by Illustrator
@@ -257,6 +274,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diff has commits, but no diffs' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:diffs) { [] }
let(:expected_diffs) { diffs }
@@ -265,6 +283,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs have invalid content' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:diffs) { ['--broken-diff'] }
let(:expected_diffs) { [] }
@@ -273,7 +292,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs are Rugged::Patch instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:first_commit) { merge_request.project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:expected_commits) { commits }
let(:diffs) { first_commit.rugged_diff_from_parent.patches }
let(:expected_diffs) { [] }
@@ -282,7 +302,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs are Rugged::Diff::Delta instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:first_commit) { merge_request.project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:expected_commits) { commits }
let(:diffs) { first_commit.rugged_diff_from_parent.deltas }
let(:expected_diffs) { [] }
diff --git a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb b/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
index 59f69d1e4b1..7b5a00c6111 100644
--- a/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::BackgroundMigration::MigrateSystemUploadsToNewFolder do
end
describe '#perform' do
- it 'renames the path of system-uploads', truncate: true do
+ it 'renames the path of system-uploads', :truncate do
upload = create(:upload, model: create(:project), path: 'uploads/system/project/avatar.jpg')
migration.perform('uploads/system/', 'uploads/-/system/')
diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
index 3ef1873e615..e52baf8dde7 100644
--- a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
@@ -3,12 +3,9 @@ require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
let(:migration) { described_class.new }
let(:base1) { create(:project) }
- let(:base1_fork1) { create(:project) }
- let(:base1_fork2) { create(:project) }
let(:base2) { create(:project) }
let(:base2_fork1) { create(:project) }
- let(:base2_fork2) { create(:project) }
let!(:forked_project_links) { table(:forked_project_links) }
let!(:fork_networks) { table(:fork_networks) }
@@ -21,21 +18,24 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
# A normal fork link
forked_project_links.create(id: 1,
forked_from_project_id: base1.id,
- forked_to_project_id: base1_fork1.id)
+ forked_to_project_id: create(:project).id)
forked_project_links.create(id: 2,
forked_from_project_id: base1.id,
- forked_to_project_id: base1_fork2.id)
-
+ forked_to_project_id: create(:project).id)
forked_project_links.create(id: 3,
forked_from_project_id: base2.id,
forked_to_project_id: base2_fork1.id)
+
+ # Create a fork of a fork
forked_project_links.create(id: 4,
forked_from_project_id: base2_fork1.id,
forked_to_project_id: create(:project).id)
-
forked_project_links.create(id: 5,
- forked_from_project_id: base2.id,
- forked_to_project_id: base2_fork2.id)
+ forked_from_project_id: create(:project).id,
+ forked_to_project_id: create(:project).id)
+
+ # Stub out the calls to the other migrations
+ allow(BackgroundMigrationWorker).to receive(:perform_in)
migration.perform(1, 3)
end
@@ -62,6 +62,17 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
expect(base2_membership).not_to be_nil
end
+ it 'creates a fork network for a fork whose source project was deleted' do
+ fork = create(:project)
+ forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
+
+ migration.perform(5, 8)
+
+ expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
+ expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
+ expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
+ end
+
it 'schedules a job for inserting memberships for forks-of-forks' do
delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
@@ -72,11 +83,11 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
end
it 'only processes a single batch of links at a time' do
- expect(fork_network_members.count).to eq(5)
+ expect(fork_networks.count).to eq(2)
migration.perform(3, 5)
- expect(fork_network_members.count).to eq(7)
+ expect(fork_networks.count).to eq(3)
end
it 'can be repeated without effect' do
diff --git a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
new file mode 100644
index 00000000000..0cb753c5853
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
+ let(:projects_table) { table(:projects) }
+ let(:merge_requests_table) { table(:merge_requests) }
+ let(:merge_request_diffs_table) { table(:merge_request_diffs) }
+
+ let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
+
+ def create_mr!(name, diffs: 0)
+ merge_request =
+ merge_requests_table.create!(target_project_id: project.id,
+ target_branch: 'master',
+ source_project_id: project.id,
+ source_branch: name,
+ title: name)
+
+ diffs.times do
+ merge_request_diffs_table.create!(merge_request_id: merge_request.id)
+ end
+
+ merge_request
+ end
+
+ def diffs_for(merge_request)
+ merge_request_diffs_table.where(merge_request_id: merge_request.id)
+ end
+
+ describe '#perform' do
+ it 'ignores MRs without diffs' do
+ merge_request_without_diff = create_mr!('without_diff')
+ mr_id = merge_request_without_diff.id
+
+ expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
+
+ expect { subject.perform(mr_id, mr_id) }
+ .not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
+ end
+
+ it 'ignores MRs that have a diff ID already set' do
+ merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
+ diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
+ mr_id = merge_request_with_multiple_diffs.id
+
+ merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
+
+ expect { subject.perform(mr_id, mr_id) }
+ .not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
+ end
+
+ it 'migrates multiple MR diffs to the correct values' do
+ merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
+
+ subject.perform(merge_requests.first.id, merge_requests.last.id)
+
+ merge_requests.each do |merge_request|
+ expect(merge_request.reload.latest_merge_request_diff_id)
+ .to eq(diffs_for(merge_request).maximum(:id))
+ end
+ end
+ end
+end
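
These examples exercise a backfill of merge_requests.latest_merge_request_diff_id. A rough sketch of the kind of perform method they describe — a single UPDATE that skips rows already populated — assuming the migration defines its own lightweight MergeRequest model (names illustrative, not taken from this diff):

  # Sketch only — not the actual migration class.
  class PopulateMergeRequestsLatestMergeRequestDiffId
    class MergeRequest < ActiveRecord::Base
      self.table_name = 'merge_requests'
    end

    def perform(start_id, stop_id)
      update = <<~SQL
        latest_merge_request_diff_id = (
          SELECT MAX(id)
          FROM merge_request_diffs
          WHERE merge_request_diffs.merge_request_id = merge_requests.id
        )
      SQL

      MergeRequest
        .where(id: start_id..stop_id)
        .where(latest_merge_request_diff_id: nil) # leaves already-populated rows untouched
        .update_all(update)
    end
  end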
diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/gitlab/backup/manager_spec.rb
index 422f2af7266..b68301a066a 100644
--- a/spec/lib/gitlab/backup/manager_spec.rb
+++ b/spec/lib/gitlab/backup/manager_spec.rb
@@ -172,10 +172,6 @@ describe Backup::Manager do
end
describe '#unpack' do
- before do
- allow(Dir).to receive(:chdir)
- end
-
context 'when there are no backup files in the directory' do
before do
allow(Dir).to receive(:glob).and_return([])
diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
new file mode 100644
index 00000000000..7f3bf5fc41c
--- /dev/null
+++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
@@ -0,0 +1,168 @@
+require 'spec_helper'
+
+describe Gitlab::BareRepositoryImport::Importer, repository: true do
+ let!(:admin) { create(:admin) }
+ let!(:base_dir) { Dir.mktmpdir + '/' }
+ let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) }
+
+ subject(:importer) { described_class.new(admin, bare_repository) }
+
+ before do
+ allow(described_class).to receive(:log)
+ end
+
+ after do
+ FileUtils.rm_rf(base_dir)
+ end
+
+ shared_examples 'importing a repository' do
+ describe '.execute' do
+ it 'creates a project for a repository in storage' do
+ FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
+ fake_importer = double
+
+ expect(described_class).to receive(:new).and_return(fake_importer)
+ expect(fake_importer).to receive(:create_project_if_needed)
+
+ described_class.execute(base_dir)
+ end
+
+ it 'skips wiki repos' do
+ repo_dir = File.join(base_dir, 'the-group', 'the-project.wiki.git')
+ FileUtils.mkdir_p(File.join(repo_dir))
+
+ expect(described_class).to receive(:log).with(" * Skipping repo #{repo_dir}")
+ expect(described_class).not_to receive(:new)
+
+ described_class.execute(base_dir)
+ end
+
+ context 'without admin users' do
+ let(:admin) { nil }
+
+ it 'raises an error' do
+ expect { described_class.execute(base_dir) }.to raise_error(Gitlab::BareRepositoryImport::Importer::NoAdminError)
+ end
+ end
+ end
+
+ describe '#create_project_if_needed' do
+ it 'starts an import for a project that did not exist' do
+ expect(importer).to receive(:create_project)
+
+ importer.create_project_if_needed
+ end
+
+ it 'skips importing when the project already exists' do
+ project = create(:project, path: 'a-project', namespace: existing_group)
+
+ expect(importer).not_to receive(:create_project)
+ expect(importer).to receive(:log).with(" * #{project.name} (#{project_path}) exists")
+
+ importer.create_project_if_needed
+ end
+
+ it 'creates a project with the correct path in the database' do
+ importer.create_project_if_needed
+
+ expect(Project.find_by_full_path(project_path)).not_to be_nil
+ end
+
+ it 'creates the Git repo on disk' do
+ FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
+
+ importer.create_project_if_needed
+
+ project = Project.find_by_full_path(project_path)
+
+ expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git'))
+ end
+
+ context 'hashed storage enabled' do
+ it 'creates a project with the correct path in the database' do
+ stub_application_setting(hashed_storage_enabled: true)
+
+ importer.create_project_if_needed
+
+ expect(Project.find_by_full_path(project_path)).not_to be_nil
+ end
+ end
+ end
+ end
+
+ context 'with subgroups', :nested_groups do
+ let(:project_path) { 'a-group/a-sub-group/a-project' }
+
+ let(:existing_group) do
+ group = create(:group, path: 'a-group')
+ create(:group, path: 'a-sub-group', parent: group)
+ end
+
+ it_behaves_like 'importing a repository'
+ end
+
+ context 'without subgroups' do
+ let(:project_path) { 'a-group/a-project' }
+ let(:existing_group) { create(:group, path: 'a-group') }
+
+ it_behaves_like 'importing a repository'
+ end
+
+ context 'without groups' do
+ let(:project_path) { 'a-project' }
+
+ it 'starts an import for a project that did not exist' do
+ expect(importer).to receive(:create_project)
+
+ importer.create_project_if_needed
+ end
+
+ it 'creates a project with the correct path in the database' do
+ importer.create_project_if_needed
+
+ expect(Project.find_by_full_path("#{admin.full_path}/#{project_path}")).not_to be_nil
+ end
+
+ it 'creates the Git repo on disk' do
+ FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
+
+ importer.create_project_if_needed
+
+ project = Project.find_by_full_path("#{admin.full_path}/#{project_path}")
+
+ expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git'))
+ end
+ end
+
+ context 'with Wiki' do
+ let(:project_path) { 'a-group/a-project' }
+ let(:existing_group) { create(:group, path: 'a-group') }
+
+ it_behaves_like 'importing a repository'
+
+ it 'creates the Wiki git repo on disk' do
+ FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
+ FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.wiki.git"))
+
+ importer.create_project_if_needed
+
+ project = Project.find_by_full_path(project_path)
+
+ expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.wiki.git'))
+ end
+ end
+
+ context 'when subgroups are not available' do
+ let(:project_path) { 'a-group/a-sub-group/a-project' }
+
+ before do
+ expect(Group).to receive(:supports_nested_groups?) { false }
+ end
+
+ describe '#create_project_if_needed' do
+ it 'raises an error' do
+ expect { importer.create_project_if_needed }.to raise_error('Nested groups are not supported on MySQL')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
new file mode 100644
index 00000000000..2db737f5fb6
--- /dev/null
+++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe ::Gitlab::BareRepositoryImport::Repository do
+ let(:project_repo_path) { described_class.new('/full/path/', '/full/path/to/repo.git') }
+
+ it 'stores the repo path' do
+ expect(project_repo_path.repo_path).to eq('/full/path/to/repo.git')
+ end
+
+ it 'stores the group path' do
+ expect(project_repo_path.group_path).to eq('to')
+ end
+
+ it 'stores the project name' do
+ expect(project_repo_path.project_name).to eq('repo')
+ end
+
+ it 'stores the wiki path' do
+ expect(project_repo_path.wiki_path).to eq('/full/path/to/repo.wiki.git')
+ end
+
+ describe '#wiki?' do
+ it 'returns true if it is a wiki' do
+ wiki_path = described_class.new('/full/path/', '/full/path/to/a/b/my.wiki.git')
+
+ expect(wiki_path.wiki?).to eq(true)
+ end
+
+ it 'returns false if it is not a wiki' do
+ expect(project_repo_path.wiki?).to eq(false)
+ end
+ end
+
+ describe '#hashed?' do
+ it 'returns true if it is a hashed folder' do
+ path = described_class.new('/full/path/', '/full/path/@hashed/my.repo.git')
+
+ expect(path.hashed?).to eq(true)
+ end
+
+ it 'returns false if it is not a hashed folder' do
+ expect(project_repo_path.hashed?).to eq(false)
+ end
+ end
+
+ describe '#project_full_path' do
+ it 'returns the project full path' do
+ expect(project_repo_path.project_full_path).to eq('to/repo')
+ end
+ end
+end
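
The expectations above pin down how the repository value object splits a bare repo path into group path, project name and wiki path. A self-contained sketch of that string handling, inferred from the asserted values (not the actual class):

  # Illustrative only: derive the parts asserted in the spec above.
  root_path = '/full/path/'
  repo_path = '/full/path/to/repo.git'

  relative     = repo_path.sub(root_path, '').sub(/\.git$/, '')  # => "to/repo"
  group_path   = File.dirname(relative)                          # => "to"
  project_name = File.basename(relative)                         # => "repo"
  wiki_path    = repo_path.sub(/\.git$/, '.wiki.git')            # => "/full/path/to/repo.wiki.git"
  wiki         = repo_path.end_with?('.wiki.git')                # => false
  hashed       = relative.start_with?('@hashed')                 # => false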
diff --git a/spec/lib/gitlab/bare_repository_importer_spec.rb b/spec/lib/gitlab/bare_repository_importer_spec.rb
deleted file mode 100644
index 36d1844b5b1..00000000000
--- a/spec/lib/gitlab/bare_repository_importer_spec.rb
+++ /dev/null
@@ -1,100 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BareRepositoryImporter, repository: true do
- subject(:importer) { described_class.new('default', project_path) }
-
- let!(:admin) { create(:admin) }
-
- before do
- allow(described_class).to receive(:log)
- end
-
- shared_examples 'importing a repository' do
- describe '.execute' do
- it 'creates a project for a repository in storage' do
- FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project_path}.git"))
- fake_importer = double
-
- expect(described_class).to receive(:new).with('default', project_path)
- .and_return(fake_importer)
- expect(fake_importer).to receive(:create_project_if_needed)
-
- described_class.execute
- end
-
- it 'skips wiki repos' do
- FileUtils.mkdir_p(File.join(TestEnv.repos_path, 'the-group', 'the-project.wiki.git'))
-
- expect(described_class).to receive(:log).with(' * Skipping wiki repo')
- expect(described_class).not_to receive(:new)
-
- described_class.execute
- end
- end
-
- describe '#initialize' do
- context 'without admin users' do
- let(:admin) { nil }
-
- it 'raises an error' do
- expect { importer }.to raise_error(Gitlab::BareRepositoryImporter::NoAdminError)
- end
- end
- end
-
- describe '#create_project_if_needed' do
- it 'starts an import for a project that did not exist' do
- expect(importer).to receive(:create_project)
-
- importer.create_project_if_needed
- end
-
- it 'skips importing when the project already exists' do
- project = create(:project, path: 'a-project', namespace: existing_group)
-
- expect(importer).not_to receive(:create_project)
- expect(importer).to receive(:log).with(" * #{project.name} (#{project_path}) exists")
-
- importer.create_project_if_needed
- end
-
- it 'creates a project with the correct path in the database' do
- importer.create_project_if_needed
-
- expect(Project.find_by_full_path(project_path)).not_to be_nil
- end
- end
- end
-
- context 'with subgroups', :nested_groups do
- let(:project_path) { 'a-group/a-sub-group/a-project' }
-
- let(:existing_group) do
- group = create(:group, path: 'a-group')
- create(:group, path: 'a-sub-group', parent: group)
- end
-
- it_behaves_like 'importing a repository'
- end
-
- context 'without subgroups' do
- let(:project_path) { 'a-group/a-project' }
- let(:existing_group) { create(:group, path: 'a-group') }
-
- it_behaves_like 'importing a repository'
- end
-
- context 'when subgroups are not available' do
- let(:project_path) { 'a-group/a-sub-group/a-project' }
-
- before do
- expect(Group).to receive(:supports_nested_groups?) { false }
- end
-
- describe '#create_project_if_needed' do
- it 'raises an error' do
- expect { importer.create_project_if_needed }.to raise_error('Nested groups are not supported on MySQL')
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index a66347ead76..a6a1d9e619f 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -54,11 +54,13 @@ describe Gitlab::BitbucketImport::Importer do
create(
:project,
import_source: project_identifier,
+ import_url: "https://bitbucket.org/#{project_identifier}.git",
import_data_attributes: { credentials: data }
)
end
let(:importer) { described_class.new(project) }
+ let(:gitlab_shell) { double }
let(:issues_statuses_sample_data) do
{
@@ -67,6 +69,10 @@ describe Gitlab::BitbucketImport::Importer do
}
end
+ before do
+ allow(importer).to receive(:gitlab_shell) { gitlab_shell }
+ end
+
context 'issues statuses' do
before do
# HACK: Bitbucket::Representation.const_get('Issue') seems to return ::Issue without this
@@ -110,15 +116,36 @@ describe Gitlab::BitbucketImport::Importer do
end
it 'maps statuses to open or closed' do
+ allow(importer).to receive(:import_wiki)
+
importer.execute
expect(project.issues.where(state: "closed").size).to eq(5)
expect(project.issues.where(state: "opened").size).to eq(2)
end
- it 'calls import_wiki' do
- expect(importer).to receive(:import_wiki)
- importer.execute
+ describe 'wiki import' do
+ it 'is skipped when the wiki exists' do
+ expect(project.wiki).to receive(:repository_exists?) { true }
+ expect(importer.gitlab_shell).not_to receive(:import_repository)
+
+ importer.execute
+
+ expect(importer.errors).to be_empty
+ end
+
+ it 'imports to the project disk_path' do
+ expect(project.wiki).to receive(:repository_exists?) { false }
+ expect(importer.gitlab_shell).to receive(:import_repository).with(
+ project.repository_storage_path,
+ project.wiki.disk_path,
+ project.import_url + '/wiki'
+ )
+
+ importer.execute
+
+ expect(importer.errors).to be_empty
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb
index 6c25b7349e1..c2bca816aae 100644
--- a/spec/lib/gitlab/checks/change_access_spec.rb
+++ b/spec/lib/gitlab/checks/change_access_spec.rb
@@ -11,13 +11,13 @@ describe Gitlab::Checks::ChangeAccess do
let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
let(:protocol) { 'ssh' }
- subject do
+ subject(:change_access) do
described_class.new(
changes,
project: project,
user_access: user_access,
protocol: protocol
- ).exec
+ )
end
before do
@@ -26,7 +26,7 @@ describe Gitlab::Checks::ChangeAccess do
context 'without failed checks' do
it "doesn't raise an error" do
- expect { subject }.not_to raise_error
+ expect { subject.exec }.not_to raise_error
end
end
@@ -34,7 +34,7 @@ describe Gitlab::Checks::ChangeAccess do
it 'raises an error' do
expect(user_access).to receive(:can_do_action?).with(:push_code).and_return(false)
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to this project.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to this project.')
end
end
@@ -45,7 +45,7 @@ describe Gitlab::Checks::ChangeAccess do
allow(user_access).to receive(:can_do_action?).with(:push_code).and_return(true)
expect(user_access).to receive(:can_do_action?).with(:admin_project).and_return(false)
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to change existing tags on this project.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to change existing tags on this project.')
end
context 'with protected tag' do
@@ -61,7 +61,7 @@ describe Gitlab::Checks::ChangeAccess do
let(:newrev) { '0000000000000000000000000000000000000000' }
it 'is prevented' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be deleted/)
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be deleted/)
end
end
@@ -70,7 +70,7 @@ describe Gitlab::Checks::ChangeAccess do
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
it 'is prevented' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be updated/)
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /cannot be updated/)
end
end
end
@@ -81,14 +81,14 @@ describe Gitlab::Checks::ChangeAccess do
let(:ref) { 'refs/tags/v9.1.0' }
it 'prevents creation below access level' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /allowed to create this tag as it is protected/)
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /allowed to create this tag as it is protected/)
end
context 'when user has access' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') }
it 'allows tag creation' do
- expect { subject }.not_to raise_error
+ expect { subject.exec }.not_to raise_error
end
end
end
@@ -101,7 +101,7 @@ describe Gitlab::Checks::ChangeAccess do
let(:ref) { 'refs/heads/master' }
it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'The default branch of a project cannot be deleted.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'The default branch of a project cannot be deleted.')
end
end
@@ -114,7 +114,7 @@ describe Gitlab::Checks::ChangeAccess do
it 'raises an error if the user is not allowed to do forced pushes to protected branches' do
expect(Gitlab::Checks::ForcePush).to receive(:force_push?).and_return(true)
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to force push code to a protected branch on this project.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to force push code to a protected branch on this project.')
end
it 'raises an error if the user is not allowed to merge to protected branches' do
@@ -122,13 +122,13 @@ describe Gitlab::Checks::ChangeAccess do
expect(user_access).to receive(:can_merge_to_branch?).and_return(false)
expect(user_access).to receive(:can_push_to_branch?).and_return(false)
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to merge code into protected branches on this project.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to merge code into protected branches on this project.')
end
it 'raises an error if the user is not allowed to push to protected branches' do
expect(user_access).to receive(:can_push_to_branch?).and_return(false)
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to protected branches on this project.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to protected branches on this project.')
end
context 'branch deletion' do
@@ -137,7 +137,7 @@ describe Gitlab::Checks::ChangeAccess do
context 'if the user is not allowed to delete protected branches' do
it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to delete protected branches from this project. Only a project master or owner can delete a protected branch.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to delete protected branches from this project. Only a project master or owner can delete a protected branch.')
end
end
@@ -150,18 +150,32 @@ describe Gitlab::Checks::ChangeAccess do
let(:protocol) { 'web' }
it 'allows branch deletion' do
- expect { subject }.not_to raise_error
+ expect { subject.exec }.not_to raise_error
end
end
context 'over SSH or HTTP' do
it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only delete protected branches using the web interface.')
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only delete protected branches using the web interface.')
end
end
end
end
end
end
+
+ context 'LFS integrity check' do
+ it 'fails if any LFS blobs are missing' do
+ allow_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).and_return(true)
+
+ expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /LFS objects are missing/)
+ end
+
+ it 'succeeds if LFS objects have already been uploaded' do
+ allow_any_instance_of(Gitlab::Checks::LfsIntegrity).to receive(:objects_missing?).and_return(false)
+
+ expect { subject.exec }.not_to raise_error
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/checks/force_push_spec.rb b/spec/lib/gitlab/checks/force_push_spec.rb
index 2c7ef622c51..633e319f46d 100644
--- a/spec/lib/gitlab/checks/force_push_spec.rb
+++ b/spec/lib/gitlab/checks/force_push_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::Checks::ForcePush do
let(:project) { create(:project, :repository) }
- context "exit code checking", skip_gitaly_mock: true do
+ context "exit code checking", :skip_gitaly_mock do
it "does not raise a runtime error if the `popen` call to git returns a zero exit code" do
allow_any_instance_of(Gitlab::Git::RevList).to receive(:popen).and_return(['normal output', 0])
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
new file mode 100644
index 00000000000..17756621221
--- /dev/null
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -0,0 +1,74 @@
+require 'spec_helper'
+
+describe Gitlab::Checks::LfsIntegrity do
+ include ProjectForksHelper
+ let(:project) { create(:project, :repository) }
+ let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+
+ subject { described_class.new(project, newrev) }
+
+ describe '#objects_missing?' do
+ let(:blob_object) { project.repository.blob_at_branch('lfs', 'files/lfs/lfs_object.iso') }
+
+ before do
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:new_objects) do |&lazy_block|
+ lazy_block.call([blob_object.id])
+ end
+ end
+
+ context 'with LFS not enabled' do
+ it 'skips integrity check' do
+ expect_any_instance_of(Gitlab::Git::RevList).not_to receive(:new_objects)
+
+ subject.objects_missing?
+ end
+ end
+
+ context 'with LFS enabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'deletion' do
+ let(:newrev) { nil }
+
+ it 'skips integrity check' do
+ expect_any_instance_of(Gitlab::Git::RevList).not_to receive(:new_objects)
+
+ expect(subject.objects_missing?).to be_falsey
+ end
+ end
+
+ it 'is true if any LFS blobs are missing' do
+ expect(subject.objects_missing?).to be_truthy
+ end
+
+ it 'is false if LFS objects have already been uploaded' do
+ lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
+ create(:lfs_objects_project, project: project, lfs_object: lfs_object)
+
+ expect(subject.objects_missing?).to be_falsey
+ end
+ end
+
+ context 'for forked project' do
+ let(:parent_project) { create(:project, :repository) }
+ let(:project) { fork_project(parent_project, nil, repository: true) }
+
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ it 'is true if the parent project is missing LFS objects' do
+ expect(subject.objects_missing?).to be_truthy
+ end
+
+ it 'is false if the parent project already contains LFS objects for the fork' do
+ lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
+ create(:lfs_objects_project, project: parent_project, lfs_object: lfs_object)
+
+ expect(subject.objects_missing?).to be_falsey
+ end
+ end
+ end
+end
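
For context, a hedged sketch of the objects_missing? check these examples drive: skip when LFS is disabled or the push is a deletion, otherwise compare the LFS pointers introduced by the new revision against the LfsObject records already available to the project. Names below are assumptions, not taken from this diff:

  # Sketch only — not the actual implementation.
  def objects_missing?(project, newrev)
    return false unless newrev && project.lfs_enabled?

    # Assumed helper: collect LFS pointer OIDs referenced by the new objects.
    new_pointer_oids = lfs_pointer_oids_for(project.repository, newrev)
    return false if new_pointer_oids.empty?

    # The real check also looks at objects shared through the fork network,
    # which is what the 'for forked project' contexts above cover.
    existing = LfsObject.where(oid: new_pointer_oids).count

    existing != new_pointer_oids.count
  end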
diff --git a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
index 15eb01eb472..4884d5f8ba4 100644
--- a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
@@ -4,11 +4,24 @@ describe Gitlab::Ci::Build::Policy::Kubernetes do
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when kubernetes service is active' do
- set(:project) { create(:kubernetes_project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'is satisfied by a kubernetes pipeline' do
+ expect(described_class.new('active'))
+ .to be_satisfied_by(pipeline)
+ end
+ end
- it 'is satisfied by a kubernetes pipeline' do
- expect(described_class.new('active'))
- .to be_satisfied_by(pipeline)
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
index 809fda11879..2a3f7807fdb 100644
--- a/spec/lib/gitlab/ci/cron_parser_spec.rb
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -77,8 +77,20 @@ describe Gitlab::Ci::CronParser do
it_behaves_like "returns time in the future"
- it 'converts time in server time zone' do
- expect(subject.hour).to eq(hour_in_utc)
+ context 'when PST (Pacific Standard Time)' do
+ it 'converts time in server time zone' do
+ Timecop.freeze(Time.utc(2017, 1, 1)) do
+ expect(subject.hour).to eq(hour_in_utc)
+ end
+ end
+ end
+
+ context 'when PDT (Pacific Daylight Time)' do
+ it 'converts time in server time zone' do
+ Timecop.freeze(Time.utc(2017, 6, 1)) do
+ expect(subject.hour).to eq(hour_in_utc)
+ end
+ end
end
end
end
@@ -100,8 +112,20 @@ describe Gitlab::Ci::CronParser do
it_behaves_like "returns time in the future"
- it 'converts time in server time zone' do
- expect(subject.hour).to eq(hour_in_utc)
+ context 'when CET (Central European Time)' do
+ it 'converts time in server time zone' do
+ Timecop.freeze(Time.utc(2017, 1, 1)) do
+ expect(subject.hour).to eq(hour_in_utc)
+ end
+ end
+ end
+
+ context 'when CEST (Central European Summer Time)' do
+ it 'converts time in server time zone' do
+ Timecop.freeze(Time.utc(2017, 6, 1)) do
+ expect(subject.hour).to eq(hour_in_utc)
+ end
+ end
end
end
@@ -111,8 +135,20 @@ describe Gitlab::Ci::CronParser do
it_behaves_like "returns time in the future"
- it 'converts time in server time zone' do
- expect(subject.hour).to eq(hour_in_utc)
+ context 'when EST (Eastern Standard Time)' do
+ it 'converts time in server time zone' do
+ Timecop.freeze(Time.utc(2017, 1, 1)) do
+ expect(subject.hour).to eq(hour_in_utc)
+ end
+ end
+ end
+
+ context 'when EDT (Eastern Daylight Time)' do
+ it 'converts time in server time zone' do
+ Timecop.freeze(Time.utc(2017, 6, 1)) do
+ expect(subject.hour).to eq(hour_in_utc)
+ end
+ end
end
end
end
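
The Timecop.freeze wrappers pin each example to a date on a known side of the daylight-saving switch, so the parsed hour is compared against a deterministic UTC offset. As a standalone illustration, with a cron expression and zone chosen only for this example and assuming the server time zone is UTC as in these specs:

  # Illustrative only: evaluate the same cron in winter (PST, UTC-8) and summer (PDT, UTC-7).
  parser = Gitlab::Ci::CronParser.new('0 12 * * *', 'America/Los_Angeles')

  Timecop.freeze(Time.utc(2017, 1, 1)) do
    parser.next_time_from(Time.now).hour # 20 — 12:00 local is 20:00 UTC while PST is in effect
  end

  Timecop.freeze(Time.utc(2017, 6, 1)) do
    parser.next_time_from(Time.now).hour # 19 — 12:00 local is 19:00 UTC while PDT is in effect
  end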
diff --git a/spec/lib/gitlab/ci/status/build/cancelable_spec.rb b/spec/lib/gitlab/ci/status/build/cancelable_spec.rb
index 5a7a42d84c0..9cdebaa5cf2 100644
--- a/spec/lib/gitlab/ci/status/build/cancelable_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/cancelable_spec.rb
@@ -66,7 +66,7 @@ describe Gitlab::Ci::Status::Build::Cancelable do
end
describe '#action_icon' do
- it { expect(subject.action_icon).to eq 'icon_action_cancel' }
+ it { expect(subject.action_icon).to eq 'cancel' }
end
describe '#action_title' do
diff --git a/spec/lib/gitlab/ci/status/build/factory_spec.rb b/spec/lib/gitlab/ci/status/build/factory_spec.rb
index 8768302eda1..d196bc6a4c2 100644
--- a/spec/lib/gitlab/ci/status/build/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/factory_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'passed'
- expect(status.icon).to eq 'icon_status_success'
+ expect(status.icon).to eq 'status_success'
expect(status.favicon).to eq 'favicon_status_success'
expect(status.label).to eq 'passed'
expect(status).to have_details
@@ -57,7 +57,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'failed'
- expect(status.icon).to eq 'icon_status_failed'
+ expect(status.icon).to eq 'status_failed'
expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq 'failed'
expect(status).to have_details
@@ -84,7 +84,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'failed'
- expect(status.icon).to eq 'icon_status_warning'
+ expect(status.icon).to eq 'status_warning'
expect(status.favicon).to eq 'favicon_status_failed'
expect(status.label).to eq 'failed (allowed to fail)'
expect(status).to have_details
@@ -113,7 +113,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'canceled'
- expect(status.icon).to eq 'icon_status_canceled'
+ expect(status.icon).to eq 'status_canceled'
expect(status.favicon).to eq 'favicon_status_canceled'
expect(status.label).to eq 'canceled'
expect(status).to have_details
@@ -139,7 +139,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'running'
- expect(status.icon).to eq 'icon_status_running'
+ expect(status.icon).to eq 'status_running'
expect(status.favicon).to eq 'favicon_status_running'
expect(status.label).to eq 'running'
expect(status).to have_details
@@ -165,7 +165,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'pending'
- expect(status.icon).to eq 'icon_status_pending'
+ expect(status.icon).to eq 'status_pending'
expect(status.favicon).to eq 'favicon_status_pending'
expect(status.label).to eq 'pending'
expect(status).to have_details
@@ -190,7 +190,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'skipped'
- expect(status.icon).to eq 'icon_status_skipped'
+ expect(status.icon).to eq 'status_skipped'
expect(status.favicon).to eq 'favicon_status_skipped'
expect(status.label).to eq 'skipped'
expect(status).to have_details
@@ -219,7 +219,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'manual'
expect(status.group).to eq 'manual'
- expect(status.icon).to eq 'icon_status_manual'
+ expect(status.icon).to eq 'status_manual'
expect(status.favicon).to eq 'favicon_status_manual'
expect(status.label).to include 'manual play action'
expect(status).to have_details
@@ -274,7 +274,7 @@ describe Gitlab::Ci::Status::Build::Factory do
it 'fabricates status with correct details' do
expect(status.text).to eq 'manual'
expect(status.group).to eq 'manual'
- expect(status.icon).to eq 'icon_status_manual'
+ expect(status.icon).to eq 'status_manual'
expect(status.favicon).to eq 'favicon_status_manual'
expect(status.label).to eq 'manual stop action (not allowed)'
expect(status).to have_details
diff --git a/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb b/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb
index 20f71459738..99a5a7e4aca 100644
--- a/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/failed_allowed_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::Ci::Status::Build::FailedAllowed do
describe '#icon' do
it 'returns a warning icon' do
- expect(subject.icon).to eq 'icon_status_warning'
+ expect(subject.icon).to eq 'status_warning'
end
end
diff --git a/spec/lib/gitlab/ci/status/build/play_spec.rb b/spec/lib/gitlab/ci/status/build/play_spec.rb
index 32b2e62e4e0..81d5f553fd1 100644
--- a/spec/lib/gitlab/ci/status/build/play_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/play_spec.rb
@@ -46,7 +46,7 @@ describe Gitlab::Ci::Status::Build::Play do
end
describe '#action_icon' do
- it { expect(subject.action_icon).to eq 'icon_action_play' }
+ it { expect(subject.action_icon).to eq 'play' }
end
describe '#action_title' do
diff --git a/spec/lib/gitlab/ci/status/build/retryable_spec.rb b/spec/lib/gitlab/ci/status/build/retryable_spec.rb
index 21026f2c968..14d42e0d70f 100644
--- a/spec/lib/gitlab/ci/status/build/retryable_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/retryable_spec.rb
@@ -66,7 +66,7 @@ describe Gitlab::Ci::Status::Build::Retryable do
end
describe '#action_icon' do
- it { expect(subject.action_icon).to eq 'icon_action_retry' }
+ it { expect(subject.action_icon).to eq 'retry' }
end
describe '#action_title' do
diff --git a/spec/lib/gitlab/ci/status/build/stop_spec.rb b/spec/lib/gitlab/ci/status/build/stop_spec.rb
index e0425103f41..18e250772f0 100644
--- a/spec/lib/gitlab/ci/status/build/stop_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/stop_spec.rb
@@ -38,7 +38,7 @@ describe Gitlab::Ci::Status::Build::Stop do
end
describe '#action_icon' do
- it { expect(subject.action_icon).to eq 'icon_action_stop' }
+ it { expect(subject.action_icon).to eq 'stop' }
end
describe '#action_title' do
diff --git a/spec/lib/gitlab/ci/status/canceled_spec.rb b/spec/lib/gitlab/ci/status/canceled_spec.rb
index 530639a5897..dc74d7e28c5 100644
--- a/spec/lib/gitlab/ci/status/canceled_spec.rb
+++ b/spec/lib/gitlab/ci/status/canceled_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Canceled do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_canceled' }
+ it { expect(subject.icon).to eq 'status_canceled' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/created_spec.rb b/spec/lib/gitlab/ci/status/created_spec.rb
index aef982e17f1..ce4333f2aca 100644
--- a/spec/lib/gitlab/ci/status/created_spec.rb
+++ b/spec/lib/gitlab/ci/status/created_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Created do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_created' }
+ it { expect(subject.icon).to eq 'status_created' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/failed_spec.rb b/spec/lib/gitlab/ci/status/failed_spec.rb
index 9a25743885c..a4a92117c7f 100644
--- a/spec/lib/gitlab/ci/status/failed_spec.rb
+++ b/spec/lib/gitlab/ci/status/failed_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Failed do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_failed' }
+ it { expect(subject.icon).to eq 'status_failed' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/manual_spec.rb b/spec/lib/gitlab/ci/status/manual_spec.rb
index 6fdc3801d71..0463f2e1aff 100644
--- a/spec/lib/gitlab/ci/status/manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/manual_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Manual do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_manual' }
+ it { expect(subject.icon).to eq 'status_manual' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/pending_spec.rb b/spec/lib/gitlab/ci/status/pending_spec.rb
index ffc53f0506b..0e25358dd8a 100644
--- a/spec/lib/gitlab/ci/status/pending_spec.rb
+++ b/spec/lib/gitlab/ci/status/pending_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Pending do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_pending' }
+ it { expect(subject.icon).to eq 'status_pending' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/running_spec.rb b/spec/lib/gitlab/ci/status/running_spec.rb
index 0babf1fb54e..9c9d431bb5d 100644
--- a/spec/lib/gitlab/ci/status/running_spec.rb
+++ b/spec/lib/gitlab/ci/status/running_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Running do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_running' }
+ it { expect(subject.icon).to eq 'status_running' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/skipped_spec.rb b/spec/lib/gitlab/ci/status/skipped_spec.rb
index 670747c9f0b..63694ca0ea6 100644
--- a/spec/lib/gitlab/ci/status/skipped_spec.rb
+++ b/spec/lib/gitlab/ci/status/skipped_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Skipped do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_skipped' }
+ it { expect(subject.icon).to eq 'status_skipped' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/success_spec.rb b/spec/lib/gitlab/ci/status/success_spec.rb
index ff65b074808..2f67df71c4f 100644
--- a/spec/lib/gitlab/ci/status/success_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::Success do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_success' }
+ it { expect(subject.icon).to eq 'status_success' }
end
describe '#favicon' do
diff --git a/spec/lib/gitlab/ci/status/success_warning_spec.rb b/spec/lib/gitlab/ci/status/success_warning_spec.rb
index 7e2269397c6..4582354e739 100644
--- a/spec/lib/gitlab/ci/status/success_warning_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_warning_spec.rb
@@ -14,7 +14,7 @@ describe Gitlab::Ci::Status::SuccessWarning do
end
describe '#icon' do
- it { expect(subject.icon).to eq 'icon_status_warning' }
+ it { expect(subject.icon).to eq 'status_warning' }
end
describe '#group' do
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index d72f8553f55..98880fe9f28 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -178,15 +178,29 @@ module Gitlab
end
context 'when kubernetes is active' do
- let(:project) { create(:kubernetes_project) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ shared_examples 'same behavior between KubernetesService and Platform::Kubernetes' do
+ it 'returns seeds for kubernetes dependent job' do
+ seeds = subject.stage_seeds(pipeline)
- it 'returns seeds for kubernetes dependent job' do
- seeds = subject.stage_seeds(pipeline)
+ expect(seeds.size).to eq 2
+ expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
+ expect(seeds.second.builds.dig(0, :name)).to eq 'production'
+ end
+ end
- expect(seeds.size).to eq 2
- expect(seeds.first.builds.dig(0, :name)).to eq 'spinach'
- expect(seeds.second.builds.dig(0, :name)).to eq 'production'
+ context 'when user configured kubernetes from Integration > Kubernetes' do
+ let(:project) { create(:kubernetes_project) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
+ end
+
+ context 'when user configured kubernetes from CI/CD > Clusters' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ it_behaves_like 'same behavior between KubernetesService and Platform::Kubernetes'
end
end
diff --git a/spec/lib/gitlab/conflict/file_collection_spec.rb b/spec/lib/gitlab/conflict/file_collection_spec.rb
index a4d7628b03a..5944ce8049a 100644
--- a/spec/lib/gitlab/conflict/file_collection_spec.rb
+++ b/spec/lib/gitlab/conflict/file_collection_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::Conflict::FileCollection do
let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start') }
- let(:file_collection) { described_class.read_only(merge_request) }
+ let(:file_collection) { described_class.new(merge_request) }
describe '#files' do
it 'returns an array of Conflict::Files' do
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 5356e9742b4..92792144429 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -8,9 +8,10 @@ describe Gitlab::Conflict::File do
let(:our_commit) { rugged.branches['conflict-resolvable'].target }
let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start', source_project: project) }
let(:index) { rugged.merge_commits(our_commit, their_commit) }
- let(:conflict) { index.conflicts.last }
- let(:merge_file_result) { index.merge_file('files/ruby/regex.rb') }
- let(:conflict_file) { described_class.new(merge_file_result, conflict, merge_request: merge_request) }
+ let(:rugged_conflict) { index.conflicts.last }
+ let(:raw_conflict_content) { index.merge_file('files/ruby/regex.rb')[:data] }
+ let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit.oid, rugged_conflict, raw_conflict_content) }
+ let(:conflict_file) { described_class.new(raw_conflict_file, merge_request: merge_request) }
describe '#resolve_lines' do
let(:section_keys) { conflict_file.sections.map { |section| section[:id] }.compact }
@@ -48,18 +49,18 @@ describe Gitlab::Conflict::File do
end
end
- it 'raises MissingResolution when passed a hash without resolutions for all sections' do
+ it 'raises ResolutionError when passed a hash without resolutions for all sections' do
empty_hash = section_keys.map { |key| [key, nil] }.to_h
invalid_hash = section_keys.map { |key| [key, 'invalid'] }.to_h
expect { conflict_file.resolve_lines({}) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
expect { conflict_file.resolve_lines(empty_hash) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
expect { conflict_file.resolve_lines(invalid_hash) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
end
end
@@ -83,6 +84,13 @@ describe Gitlab::Conflict::File do
expect(line.text).to eq(html_to_text(line.rich_text))
end
end
+
+ # This spec will break if Rouge's highlighting changes, but we need to
+ # ensure that the lines are actually highlighted.
+ it 'highlights the lines correctly' do
+ expect(conflict_file.lines.first.rich_text)
+ .to eq("<span id=\"LC1\" class=\"line\" lang=\"ruby\"><span class=\"k\">module</span> <span class=\"nn\">Gitlab</span></span>\n")
+ end
end
describe '#sections' do
@@ -144,7 +152,7 @@ describe Gitlab::Conflict::File do
end
context 'with an example file' do
- let(:file) do
+ let(:raw_conflict_content) do
<<FILE
# Ensure there is no match line header here
def username_regexp
@@ -220,7 +228,6 @@ end
FILE
end
- let(:conflict_file) { described_class.new({ data: file }, conflict, merge_request: merge_request) }
let(:sections) { conflict_file.sections }
it 'sets the correct match line headers' do
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index d57ffcae8e1..492659a82b0 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::CurrentSettings do
it 'falls back to DB if Redis returns an empty value' do
expect(ApplicationSetting).to receive(:cached).and_return(nil)
- expect(ApplicationSetting).to receive(:last).and_call_original
+ expect(ApplicationSetting).to receive(:last).and_call_original.twice
expect(current_application_settings).to be_a(ApplicationSetting)
end
diff --git a/spec/lib/gitlab/database/grant_spec.rb b/spec/lib/gitlab/database/grant_spec.rb
index 651da3e8476..5ebf3f399b6 100644
--- a/spec/lib/gitlab/database/grant_spec.rb
+++ b/spec/lib/gitlab/database/grant_spec.rb
@@ -1,16 +1,6 @@
require 'spec_helper'
describe Gitlab::Database::Grant do
- describe '.scope_to_current_user' do
- it 'scopes the relation to the current user' do
- user = Gitlab::Database.username
- column = Gitlab::Database.postgresql? ? :grantee : :User
- names = described_class.scope_to_current_user.pluck(column).uniq
-
- expect(names).to eq([user])
- end
- end
-
describe '.create_and_execute_trigger' do
it 'returns true when the user can create and execute a trigger' do
# We assume the DB/user is set up correctly so that triggers can be
@@ -18,13 +8,11 @@ describe Gitlab::Database::Grant do
expect(described_class.create_and_execute_trigger?('users')).to eq(true)
end
- it 'returns false when the user can not create and/or execute a trigger' do
- allow(described_class).to receive(:scope_to_current_user)
- .and_return(described_class.none)
-
- result = described_class.create_and_execute_trigger?('kittens')
-
- expect(result).to eq(false)
+ it 'returns false when the user can not create and/or execute a trigger', :postgresql do
+ # In case of MySQL the user may have SUPER permissions, making it
+ # impossible to have `false` returned when running tests; hence we only
+ # run these tests on PostgreSQL.
+ expect(described_class.create_and_execute_trigger?('foo')).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 90aa4f63dd5..596cc435bd9 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -229,7 +229,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
end
end
- describe '#track_rename', redis: true do
+ describe '#track_rename', :redis do
it 'tracks a rename in redis' do
key = 'rename:FakeRenameReservedPathMigrationV1:namespace'
@@ -246,7 +246,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :trunca
end
end
- describe '#reverts_for_type', redis: true do
+ describe '#reverts_for_type', :redis do
it 'yields for each tracked rename' do
subject.track_rename('project', 'old_path', 'new_path')
subject.track_rename('project', 'old_path2', 'new_path2')
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index 32ac0b88a9b..1143182531f 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -241,7 +241,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
end
end
- describe '#revert_renames', redis: true do
+ describe '#revert_renames', :redis do
it 'renames the routes back to the previous values' do
project = create(:project, :repository, path: 'a-project', namespace: namespace)
subject.rename_namespace(namespace)
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 595e06a9748..e850b5cd6a4 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -87,6 +87,14 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
+ it 'does not move the repositories when hashed storage is enabled' do
+ project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:repository])
+
+ expect(subject).not_to receive(:move_repository)
+
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
+ end
+
it 'moves uploads' do
expect(subject).to receive(:move_uploads)
.with('known-parent/the-path', 'known-parent/the-path0')
@@ -94,6 +102,14 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
end
+ it 'does not move uploads when hashed storage is enabled for attachments' do
+ project.update!(storage_version: Project::HASHED_STORAGE_FEATURES[:attachments])
+
+ expect(subject).not_to receive(:move_uploads)
+
+ subject.move_project_folders(project, 'known-parent/the-path', 'known-parent/the-path0')
+ end
+
it 'moves pages' do
expect(subject).to receive(:move_pages)
.with('known-parent/the-path', 'known-parent/the-path0')
@@ -115,7 +131,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :tr
end
end
- describe '#revert_renames', redis: true do
+ describe '#revert_renames', :redis do
it 'renames the routes back to the previous values' do
subject.rename_project(project)
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 5fa94999d25..fcddfad3f9f 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -202,6 +202,26 @@ describe Gitlab::Database do
it 'handles non-UTF-8 data' do
expect { described_class.bulk_insert('test', [{ a: "\255" }]) }.not_to raise_error
end
+
+ context 'when using PostgreSQL' do
+ before do
+ allow(described_class).to receive(:mysql?).and_return(false)
+ end
+
+ it 'allows returning the IDs of the inserted rows' do
+ result = double(:result, values: [['10']])
+
+ expect(connection)
+ .to receive(:execute)
+ .with(/RETURNING id/)
+ .and_return(result)
+
+ ids = described_class
+ .bulk_insert('test', [{ number: 10 }], return_ids: true)
+
+ expect(ids).to eq([10])
+ end
+ end
end
describe '.create_connection_pool' do
@@ -256,4 +276,26 @@ describe Gitlab::Database do
expect(described_class.false_value).to eq 0
end
end
+
+ describe '#sanitize_timestamp' do
+ let(:max_timestamp) { Time.at((1 << 31) - 1) }
+
+ subject { described_class.sanitize_timestamp(timestamp) }
+
+ context 'with a timestamp smaller than MAX_TIMESTAMP_VALUE' do
+ let(:timestamp) { max_timestamp - 10.years }
+
+ it 'returns the given timestamp' do
+ expect(subject).to eq(timestamp)
+ end
+ end
+
+ context 'with a timestamp larger than MAX_TIMESTAMP_VALUE' do
+ let(:timestamp) { max_timestamp + 1.second }
+
+ it 'returns MAX_TIMESTAMP_VALUE' do
+ expect(subject).to eq(max_timestamp)
+ end
+ end
+ end
end
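For orientation, the examples above pin down the behaviour of sanitize_timestamp: values past the signed 32-bit boundary are clamped, smaller values pass through unchanged. A minimal sketch consistent with those expectations (not the actual GitLab implementation) looks like this:

    module TimestampSanitizer
      # Largest value a signed 32-bit timestamp column can hold.
      MAX_TIMESTAMP_VALUE = Time.at((1 << 31) - 1)

      # Returns the timestamp unchanged unless it exceeds the boundary,
      # in which case it is clamped to MAX_TIMESTAMP_VALUE.
      def self.sanitize_timestamp(timestamp)
        timestamp > MAX_TIMESTAMP_VALUE ? MAX_TIMESTAMP_VALUE : timestamp
      end
    end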
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index c91895cedc3..ff9acfd08b9 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -116,12 +116,8 @@ describe Gitlab::Diff::File do
end
context 'when renamed' do
- let(:commit) { project.commit('6907208d755b60ebeacb2e9dfea74c92c3449a1f') }
- let(:diff_file) { commit.diffs.diff_file_with_new_path('files/js/commit.coffee') }
-
- before do
- allow(diff_file.new_blob).to receive(:id).and_return(diff_file.old_blob.id)
- end
+ let(:commit) { project.commit('94bb47ca1297b7b3731ff2a36923640991e9236f') }
+ let(:diff_file) { commit.diffs.diff_file_with_new_path('CHANGELOG.md') }
it 'returns false' do
expect(diff_file.content_changed?).to be_falsey
diff --git a/spec/lib/gitlab/diff/parser_spec.rb b/spec/lib/gitlab/diff/parser_spec.rb
index 8af49ed50ff..80c8c189665 100644
--- a/spec/lib/gitlab/diff/parser_spec.rb
+++ b/spec/lib/gitlab/diff/parser_spec.rb
@@ -143,4 +143,21 @@ eos
it { expect(parser.parse([])).to eq([]) }
it { expect(parser.parse(nil)).to eq([]) }
end
+
+ describe 'tolerates special diff markers in the content' do
+ it "counts lines correctly" do
+ diff = <<~END
+ --- a/test
+ +++ b/test
+ @@ -1,2 +1,2 @@
+ +ipsum
+ +++ b
+ -ipsum
+ END
+
+ lines = parser.parse(diff.lines).to_a
+
+ expect(lines.size).to eq(3)
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index 9bf54fdecc4..677eb373d22 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -40,7 +40,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 0)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 0)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -108,7 +108,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 15)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 15)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -149,7 +149,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -189,7 +189,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 13, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 13, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -233,7 +233,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 5)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 5)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -274,7 +274,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -314,7 +314,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 4, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 4, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -357,13 +357,50 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 0, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 0, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
end
end
+ describe "position for a missing ref" do
+ let(:diff_refs) do
+ Gitlab::Diff::DiffRefs.new(
+ base_sha: "not_existing_sha",
+ head_sha: "existing_sha"
+ )
+ end
+
+ subject do
+ described_class.new(
+ old_path: "files/ruby/feature.rb",
+ new_path: "files/ruby/feature.rb",
+ old_line: 3,
+ new_line: nil,
+ diff_refs: diff_refs
+ )
+ end
+
+ describe "#diff_file" do
+ it "does not raise exception" do
+ expect { subject.diff_file(project.repository) }.not_to raise_error
+ end
+ end
+
+ describe "#diff_line" do
+ it "does not raise exception" do
+ expect { subject.diff_line(project.repository) }.not_to raise_error
+ end
+ end
+
+ describe "#line_code" do
+ it "does not raise exception" do
+ expect { subject.line_code(project.repository) }.not_to raise_error
+ end
+ end
+ end
+
describe "position for a file in the initial commit" do
let(:commit) { project.commit("1a0b36b3cdad1d2ee32457c102a8c0b7056fa863") }
@@ -399,7 +436,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 0)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 0)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -447,7 +484,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 0, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 0, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
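The specs above replace Gitlab::Diff::LineCode.generate with Gitlab::Git.diff_line_code. A rough sketch of what such a helper computes — a SHA1 of the file path combined with the old and new line numbers — is shown below; the exact string format is an assumption and should be checked against the real helper.

    require 'digest'

    # Sketch of a diff line code: <sha1 of path>_<old line>_<new line>.
    def diff_line_code(file_path, new_line, old_line)
      "#{Digest::SHA1.hexdigest(file_path)}_#{old_line}_#{new_line}"
    end

    # diff_line_code('CHANGELOG.md', 13, 12)
    # => "<40 hex chars>_12_13"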
diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb
index e5138705443..ddc4f6c5b5c 100644
--- a/spec/lib/gitlab/diff/position_tracer_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer_spec.rb
@@ -1771,9 +1771,9 @@ describe Gitlab::Diff::PositionTracer do
describe "merge of target branch" do
let(:merge_commit) do
- update_file_again_commit
+ second_create_file_commit
- merge_request = create(:merge_request, source_branch: second_create_file_commit.sha, target_branch: branch_name, source_project: project)
+ merge_request = create(:merge_request, source_branch: second_branch_name, target_branch: branch_name, source_project: project)
repository.merge(current_user, merge_request.diff_head_sha, merge_request, "Merge branches")
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 8b14b227e65..f6e5c55240f 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -6,6 +6,10 @@ describe Gitlab::EncodingHelper do
describe '#encode!' do
[
+ ["nil", nil, nil],
+ ["empty string", "".encode("ASCII-8BIT"), "".encode("UTF-8")],
+ ["invalid utf-8 encoded string", "my bad string\xE5".force_encoding("UTF-8"), "my bad string"],
+ ["frozen non-ascii string", "é".force_encoding("ASCII-8BIT").freeze, "é".encode("UTF-8")],
[
'leaves ascii only string as is',
'ascii only string',
@@ -81,6 +85,9 @@ describe Gitlab::EncodingHelper do
describe '#encode_utf8' do
[
+ ["nil", nil, nil],
+ ["empty string", "".encode("ASCII-8BIT"), "".encode("UTF-8")],
+ ["invalid utf-8 encoded string", "my bad string\xE5".force_encoding("UTF-8"), "my bad stringå"],
[
"encodes valid utf8 encoded string to utf8",
"λ, λ, λ".encode("UTF-8"),
@@ -95,12 +102,18 @@ describe Gitlab::EncodingHelper do
"encodes valid ISO-8859-1 encoded string to utf8",
"Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("ISO-8859-1", "UTF-8"),
"Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("UTF-8")
+ ],
+ [
+ # Test case from https://gitlab.com/gitlab-org/gitlab-ce/issues/39227
+ "Equifax branch name",
+ "refs/heads/Equifax".encode("UTF-8"),
+ "refs/heads/Equifax".encode("UTF-8")
]
].each do |description, test_string, xpect|
it description do
- r = ext_class.encode_utf8(test_string.force_encoding('UTF-8'))
+ r = ext_class.encode_utf8(test_string)
expect(r).to eq(xpect)
- expect(r.encoding.name).to eq('UTF-8')
+ expect(r.encoding.name).to eq('UTF-8') if xpect
end
end
diff --git a/spec/lib/gitlab/fake_application_settings_spec.rb b/spec/lib/gitlab/fake_application_settings_spec.rb
index 34322c2a693..af12e13d36d 100644
--- a/spec/lib/gitlab/fake_application_settings_spec.rb
+++ b/spec/lib/gitlab/fake_application_settings_spec.rb
@@ -1,25 +1,25 @@
require 'spec_helper'
describe Gitlab::FakeApplicationSettings do
- let(:defaults) { { password_authentication_enabled: false, foobar: 'asdf', signup_enabled: true, 'test?' => 123 } }
+ let(:defaults) { { password_authentication_enabled_for_web: false, foobar: 'asdf', signup_enabled: true, 'test?' => 123 } }
subject { described_class.new(defaults) }
it 'wraps OpenStruct variables properly' do
- expect(subject.password_authentication_enabled).to be_falsey
+ expect(subject.password_authentication_enabled_for_web).to be_falsey
expect(subject.signup_enabled).to be_truthy
expect(subject.foobar).to eq('asdf')
end
it 'defines predicate methods' do
- expect(subject.password_authentication_enabled?).to be_falsey
+ expect(subject.password_authentication_enabled_for_web?).to be_falsey
expect(subject.signup_enabled?).to be_truthy
end
it 'predicate method changes when value is updated' do
- subject.password_authentication_enabled = true
+ subject.password_authentication_enabled_for_web = true
- expect(subject.password_authentication_enabled?).to be_truthy
+ expect(subject.password_authentication_enabled_for_web?).to be_truthy
end
it 'does not define a predicate method' do
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
index 695fd6f8573..8e524f9b05a 100644
--- a/spec/lib/gitlab/file_detector_spec.rb
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -18,6 +18,10 @@ describe Gitlab::FileDetector do
expect(described_class.type_of('README.md')).to eq(:readme)
end
+ it 'returns nil for a README file in a directory' do
+ expect(described_class.type_of('foo/README.md')).to be_nil
+ end
+
it 'returns the type of a changelog file' do
%w(CHANGELOG HISTORY CHANGES NEWS).each do |file|
expect(described_class.type_of(file)).to eq(:changelog)
@@ -52,6 +56,14 @@ describe Gitlab::FileDetector do
end
end
+ it 'returns the type of an issue template' do
+ expect(described_class.type_of('.gitlab/issue_templates/foo.md')).to eq(:issue_template)
+ end
+
+ it 'returns the type of a merge request template' do
+ expect(described_class.type_of('.gitlab/merge_request_templates/foo.md')).to eq(:merge_request_template)
+ end
+
it 'returns nil for an unknown file' do
expect(described_class.type_of('foo.txt')).to be_nil
end
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 465c2012b05..793228701cf 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -73,7 +73,7 @@ describe Gitlab::Git::Blame, seed_helper: true do
it_behaves_like 'blaming a file'
end
- context 'when Gitaly blame feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly blame feature is disabled', :skip_gitaly_mock do
it_behaves_like 'blaming a file'
end
end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index f3945e748ab..c04a9688503 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -112,7 +112,7 @@ describe Gitlab::Git::Blob, seed_helper: true do
it_behaves_like 'finding blobs'
end
- context 'when project_raw_show Gitaly feature is disabled', skip_gitaly_mock: true do
+ context 'when project_raw_show Gitaly feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding blobs'
end
end
@@ -143,6 +143,16 @@ describe Gitlab::Git::Blob, seed_helper: true do
expect(blob.loaded_size).to eq(blob_size)
end
end
+
+ context 'when sha references a tree' do
+ it 'returns nil' do
+ tree = Gitlab::Git::Commit.find(repository, 'master').tree
+
+ blob = Gitlab::Git::Blob.raw(repository, tree.oid)
+
+ expect(blob).to be_nil
+ end
+ end
end
describe '.raw' do
@@ -150,7 +160,7 @@ describe Gitlab::Git::Blob, seed_helper: true do
it_behaves_like 'finding blobs by ID'
end
- context 'when the blob_raw Gitaly feature is disabled', skip_gitaly_mock: true do
+ context 'when the blob_raw Gitaly feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding blobs by ID'
end
end
@@ -226,6 +236,51 @@ describe Gitlab::Git::Blob, seed_helper: true do
end
end
+ describe '.batch_lfs_pointers' do
+ let(:tree_object) { Gitlab::Git::Commit.find(repository, 'master').tree }
+
+ let(:non_lfs_blob) do
+ Gitlab::Git::Blob.find(
+ repository,
+ 'master',
+ 'README.md'
+ )
+ end
+
+ let(:lfs_blob) do
+ Gitlab::Git::Blob.find(
+ repository,
+ '33bcff41c232a11727ac6d660bd4b0c2ba86d63d',
+ 'files/lfs/image.jpg'
+ )
+ end
+
+ it 'returns a list of Gitlab::Git::Blob' do
+ blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
+
+ expect(blobs.count).to eq(1)
+ expect(blobs).to all(be_a(Gitlab::Git::Blob))
+ end
+
+ it 'silently ignores tree objects' do
+ blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
+
+ expect(blobs).to eq([])
+ end
+
+ it 'silently ignores non-LFS objects' do
+ blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
+
+ expect(blobs).to eq([])
+ end
+
+ it 'avoids loading large blobs into memory' do
+ expect(repository).not_to receive(:lookup)
+
+ described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
+ end
+ end
+
describe 'encoding' do
context 'file with russian text' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, "encoding/russian.rb") }
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 318a7b7a332..708870060e7 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -7,6 +7,38 @@ describe Gitlab::Git::Branch, seed_helper: true do
it { is_expected.to be_kind_of Array }
+ describe '.find' do
+ subject { described_class.find(repository, branch) }
+
+ before do
+ allow(repository).to receive(:find_branch).with(branch)
+ .and_call_original
+ end
+
+ context 'when finding branch via branch name' do
+ let(:branch) { 'master' }
+
+ it 'returns a branch object' do
+ expect(subject).to be_a(described_class)
+ expect(subject.name).to eq(branch)
+
+ expect(repository).to have_received(:find_branch).with(branch)
+ end
+ end
+
+ context 'when the branch is already a branch' do
+ let(:commit) { repository.commit('master') }
+ let(:branch) { described_class.new(repository, 'master', commit.sha, commit) }
+
+ it 'returns a branch object' do
+ expect(subject).to be_a(described_class)
+ expect(subject).to eq(branch)
+
+ expect(repository).not_to have_received(:find_branch).with(branch)
+ end
+ end
+ end
+
describe '#size' do
subject { super().size }
it { is_expected.to eq(SeedRepo::Repo::BRANCHES.size) }
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 3815055139a..9f4e3c49adc 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -261,7 +261,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
it_should_behave_like '.where'
end
- describe '.where without gitaly', skip_gitaly_mock: true do
+ describe '.where without gitaly', :skip_gitaly_mock do
it_should_behave_like '.where'
end
@@ -336,7 +336,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
it_behaves_like 'finding all commits'
end
- context 'when Gitaly find_all_commits feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly find_all_commits feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding all commits'
context 'while applying a sort order based on the `order` option' do
@@ -405,7 +405,7 @@ describe Gitlab::Git::Commit, seed_helper: true do
it_should_behave_like '#stats'
end
- describe '#stats with gitaly disabled', skip_gitaly_mock: true do
+ describe '#stats with gitaly disabled', :skip_gitaly_mock do
it_should_behave_like '#stats'
end
diff --git a/spec/lib/gitlab/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index fce606a2bb5..7b035a381f1 100644
--- a/spec/lib/gitlab/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -1,11 +1,9 @@
require 'spec_helper'
-describe Gitlab::Conflict::Parser do
- let(:parser) { described_class.new }
-
- describe '#parse' do
+describe Gitlab::Git::Conflict::Parser do
+ describe '.parse' do
def parse_text(text)
- parser.parse(text, our_path: 'README.md', their_path: 'README.md')
+ described_class.parse(text, our_path: 'README.md', their_path: 'README.md')
end
context 'when the file has valid conflicts' do
@@ -87,33 +85,37 @@ CONFLICT
end
let(:lines) do
- parser.parse(text, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
+ described_class.parse(text, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
+ end
+ let(:old_line_numbers) do
+ lines.select { |line| line[:type] != 'new' }.map { |line| line[:line_old] }
end
+ let(:new_line_numbers) do
+ lines.select { |line| line[:type] != 'old' }.map { |line| line[:line_new] }
+ end
+ let(:line_indexes) { lines.map { |line| line[:line_obj_index] } }
it 'sets our lines as new lines' do
- expect(lines[8..13]).to all(have_attributes(type: 'new'))
- expect(lines[26..27]).to all(have_attributes(type: 'new'))
- expect(lines[56..57]).to all(have_attributes(type: 'new'))
+ expect(lines[8..13]).to all(include(type: 'new'))
+ expect(lines[26..27]).to all(include(type: 'new'))
+ expect(lines[56..57]).to all(include(type: 'new'))
end
it 'sets their lines as old lines' do
- expect(lines[14..19]).to all(have_attributes(type: 'old'))
- expect(lines[28..29]).to all(have_attributes(type: 'old'))
- expect(lines[58..59]).to all(have_attributes(type: 'old'))
+ expect(lines[14..19]).to all(include(type: 'old'))
+ expect(lines[28..29]).to all(include(type: 'old'))
+ expect(lines[58..59]).to all(include(type: 'old'))
end
it 'sets non-conflicted lines as both' do
- expect(lines[0..7]).to all(have_attributes(type: nil))
- expect(lines[20..25]).to all(have_attributes(type: nil))
- expect(lines[30..55]).to all(have_attributes(type: nil))
- expect(lines[60..62]).to all(have_attributes(type: nil))
+ expect(lines[0..7]).to all(include(type: nil))
+ expect(lines[20..25]).to all(include(type: nil))
+ expect(lines[30..55]).to all(include(type: nil))
+ expect(lines[60..62]).to all(include(type: nil))
end
- it 'sets consecutive line numbers for index, old_pos, and new_pos' do
- old_line_numbers = lines.select { |line| line.type != 'new' }.map(&:old_pos)
- new_line_numbers = lines.select { |line| line.type != 'old' }.map(&:new_pos)
-
- expect(lines.map(&:index)).to eq(0.upto(62).to_a)
+ it 'sets consecutive line numbers for line_obj_index, line_old, and line_new' do
+ expect(line_indexes).to eq(0.upto(62).to_a)
expect(old_line_numbers).to eq(1.upto(53).to_a)
expect(new_line_numbers).to eq(1.upto(53).to_a)
end
@@ -123,12 +125,12 @@ CONFLICT
context 'when there is a non-start delimiter first' do
it 'raises UnexpectedDelimiter when there is a middle delimiter first' do
expect { parse_text('=======') }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'raises UnexpectedDelimiter when there is an end delimiter first' do
expect { parse_text('>>>>>>> README.md') }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'does not raise when there is an end delimiter for a different path first' do
@@ -143,12 +145,12 @@ CONFLICT
it 'raises UnexpectedDelimiter when it is followed by an end delimiter' do
expect { parse_text(start_text + '>>>>>>> README.md' + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'raises UnexpectedDelimiter when it is followed by another start delimiter' do
expect { parse_text(start_text + start_text + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'does not raise when it is followed by a start delimiter for a different path' do
@@ -163,12 +165,12 @@ CONFLICT
it 'raises UnexpectedDelimiter when it is followed by another middle delimiter' do
expect { parse_text(start_text + '=======' + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'raises UnexpectedDelimiter when it is followed by a start delimiter' do
expect { parse_text(start_text + start_text + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'does not raise when it is followed by a start delimiter for another path' do
@@ -181,25 +183,25 @@ CONFLICT
start_text = "<<<<<<< README.md\n=======\n"
expect { parse_text(start_text) }
- .to raise_error(Gitlab::Conflict::Parser::MissingEndDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::MissingEndDelimiter)
expect { parse_text(start_text + '>>>>>>> some-other-path.md') }
- .to raise_error(Gitlab::Conflict::Parser::MissingEndDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::MissingEndDelimiter)
end
end
context 'other file types' do
it 'raises UnmergeableFile when lines is blank, indicating a binary file' do
expect { parse_text('') }
- .to raise_error(Gitlab::Conflict::Parser::UnmergeableFile)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
expect { parse_text(nil) }
- .to raise_error(Gitlab::Conflict::Parser::UnmergeableFile)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
it 'raises UnmergeableFile when the file is over 200 KB' do
expect { parse_text('a' * 204801) }
- .to raise_error(Gitlab::Conflict::Parser::UnmergeableFile)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
# All text from Rugged has an encoding of ASCII_8BIT, so force that in
@@ -214,7 +216,7 @@ CONFLICT
context 'when the file contains non-UTF-8 characters' do
it 'raises UnsupportedEncoding' do
expect { parse_text("a\xC4\xFC".force_encoding(Encoding::ASCII_8BIT)) }
- .to raise_error(Gitlab::Conflict::Parser::UnsupportedEncoding)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnsupportedEncoding)
end
end
end
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index ee657101f4c..65edc750f39 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -487,6 +487,7 @@ describe Gitlab::Git::DiffCollection, seed_helper: true do
loop do
break if @count.zero?
+
# It is critical to decrement before yielding. We may never reach the lines after 'yield'.
@count -= 1
yield @value
diff --git a/spec/lib/gitlab/git/env_spec.rb b/spec/lib/gitlab/git/env_spec.rb
index d9df99bfe05..03836d49518 100644
--- a/spec/lib/gitlab/git/env_spec.rb
+++ b/spec/lib/gitlab/git/env_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Git::Env do
- describe "#set" do
+ describe ".set" do
context 'with RequestStore.store disabled' do
before do
allow(RequestStore).to receive(:active?).and_return(false)
@@ -34,25 +34,57 @@ describe Gitlab::Git::Env do
end
end
- describe "#all" do
+ describe ".all" do
context 'with RequestStore.store enabled' do
before do
allow(RequestStore).to receive(:active?).and_return(true)
described_class.set(
GIT_OBJECT_DIRECTORY: 'foo',
- GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar')
+ GIT_ALTERNATE_OBJECT_DIRECTORIES: ['bar'])
end
it 'returns an env hash' do
expect(described_class.all).to eq({
'GIT_OBJECT_DIRECTORY' => 'foo',
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => ['bar']
})
end
end
end
- describe "#[]" do
+ describe ".to_env_hash" do
+ context 'with RequestStore.store enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:key) { 'GIT_OBJECT_DIRECTORY' }
+ subject { described_class.to_env_hash }
+
+ where(:input, :output) do
+ nil | nil
+ 'foo' | 'foo'
+ [] | ''
+ ['foo'] | 'foo'
+ %w[foo bar] | 'foo:bar'
+ end
+
+ with_them do
+ before do
+ allow(RequestStore).to receive(:active?).and_return(true)
+ described_class.set(key.to_sym => input)
+ end
+
+ it 'puts the right value in the hash' do
+ if output
+ expect(subject.fetch(key)).to eq(output)
+ else
+ expect(subject.has_key?(key)).to eq(false)
+ end
+ end
+ end
+ end
+ end
+
+ describe ".[]" do
context 'with RequestStore.store enabled' do
before do
allow(RequestStore).to receive(:active?).and_return(true)
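The .to_env_hash table above fixes the conversion rules: nil values are dropped, strings pass through, and arrays are joined with ':' (the separator GIT_ALTERNATE_OBJECT_DIRECTORIES expects). A small sketch matching that table, under the assumption that the stored env is available as a plain Hash, could look like this:

    # Illustrative conversion consistent with the table-driven examples above.
    def to_env_hash(env)
      env.each_with_object({}) do |(key, value), hash|
        next if value.nil?                 # nil entries never reach the hash

        hash[key.to_s] = value.is_a?(Array) ? value.join(':') : value
      end
    end

    # to_env_hash(GIT_OBJECT_DIRECTORY: %w[foo bar])
    # => { "GIT_OBJECT_DIRECTORY" => "foo:bar" }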
diff --git a/spec/lib/gitlab/git/hooks_service_spec.rb b/spec/lib/gitlab/git/hooks_service_spec.rb
index 51e4e3fdad1..3ed3feb4c74 100644
--- a/spec/lib/gitlab/git/hooks_service_spec.rb
+++ b/spec/lib/gitlab/git/hooks_service_spec.rb
@@ -1,24 +1,26 @@
require 'spec_helper'
describe Gitlab::Git::HooksService, seed_helper: true do
- let(:user) { Gitlab::Git::User.new('janedoe', 'Jane Doe', 'janedoe@example.com', 'user-456') }
+ let(:gl_id) { 'user-456' }
+ let(:gl_username) { 'janedoe' }
+ let(:user) { Gitlab::Git::User.new(gl_username, 'Jane Doe', 'janedoe@example.com', gl_id) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, 'project-123') }
let(:service) { described_class.new }
-
- before do
- @blankrev = Gitlab::Git::BLANK_SHA
- @oldrev = SeedRepo::Commit::PARENT_ID
- @newrev = SeedRepo::Commit::ID
- @ref = 'refs/heads/feature'
- end
+ let(:blankrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { SeedRepo::Commit::PARENT_ID }
+ let(:newrev) { SeedRepo::Commit::ID }
+ let(:ref) { 'refs/heads/feature' }
describe '#execute' do
context 'when receive hooks were successful' do
- it 'calls post-receive hook' do
- hook = double(trigger: [true, nil])
+ let(:hook) { double(:hook) }
+
+ it 'calls all three hooks' do
expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
+ expect(hook).to receive(:trigger).with(gl_id, gl_username, blankrev, newrev, ref)
+ .exactly(3).times.and_return([true, nil])
- service.execute(user, repository, @blankrev, @newrev, @ref) { }
+ service.execute(user, repository, blankrev, newrev, ref) { }
end
end
@@ -28,7 +30,7 @@ describe Gitlab::Git::HooksService, seed_helper: true do
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
- service.execute(user, repository, @blankrev, @newrev, @ref)
+ service.execute(user, repository, blankrev, newrev, ref)
end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
end
end
@@ -40,7 +42,7 @@ describe Gitlab::Git::HooksService, seed_helper: true do
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
- service.execute(user, repository, @blankrev, @newrev, @ref)
+ service.execute(user, repository, blankrev, newrev, ref)
end.to raise_error(Gitlab::Git::HooksService::PreReceiveError)
end
end
diff --git a/spec/lib/gitlab/git/lfs_changes_spec.rb b/spec/lib/gitlab/git/lfs_changes_spec.rb
new file mode 100644
index 00000000000..c9007d7d456
--- /dev/null
+++ b/spec/lib/gitlab/git/lfs_changes_spec.rb
@@ -0,0 +1,48 @@
+require 'spec_helper'
+
+describe Gitlab::Git::LfsChanges do
+ let(:project) { create(:project, :repository) }
+ let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+ let(:blob_object_id) { '0c304a93cb8430108629bbbcaa27db3343299bc0' }
+
+ subject { described_class.new(project.repository, newrev) }
+
+ describe 'new_pointers' do
+ before do
+ allow_any_instance_of(Gitlab::Git::RevList).to receive(:new_objects).and_yield([blob_object_id])
+ end
+
+ it 'uses rev-list to find new objects' do
+ rev_list = double
+ allow(Gitlab::Git::RevList).to receive(:new).and_return(rev_list)
+
+ expect(rev_list).to receive(:new_objects).and_return([])
+
+ subject.new_pointers
+ end
+
+ it 'filters new objects to find lfs pointers' do
+ expect(Gitlab::Git::Blob).to receive(:batch_lfs_pointers).with(project.repository, [blob_object_id])
+
+ subject.new_pointers(object_limit: 1)
+ end
+
+ it 'limits new_objects using object_limit' do
+ expect(Gitlab::Git::Blob).to receive(:batch_lfs_pointers).with(project.repository, [])
+
+ subject.new_pointers(object_limit: 0)
+ end
+ end
+
+ describe 'all_pointers' do
+ it 'uses rev-list to find all objects' do
+ rev_list = double
+ allow(Gitlab::Git::RevList).to receive(:new).and_return(rev_list)
+ allow(rev_list).to receive(:all_objects).and_yield([blob_object_id])
+
+ expect(Gitlab::Git::Blob).to receive(:batch_lfs_pointers).with(project.repository, [blob_object_id])
+
+ subject.all_pointers
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/popen_spec.rb b/spec/lib/gitlab/git/popen_spec.rb
new file mode 100644
index 00000000000..b033ede9062
--- /dev/null
+++ b/spec/lib/gitlab/git/popen_spec.rb
@@ -0,0 +1,149 @@
+require 'spec_helper'
+
+describe 'Gitlab::Git::Popen' do
+ let(:path) { Rails.root.join('tmp').to_s }
+
+ let(:klass) do
+ Class.new(Object) do
+ include Gitlab::Git::Popen
+ end
+ end
+
+ context 'popen' do
+ context 'zero status' do
+ let(:result) { klass.new.popen(%w(ls), path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to be_zero }
+ it { expect(output).to include('tests') }
+ end
+
+ context 'non-zero status' do
+ let(:result) { klass.new.popen(%w(cat NOTHING), path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to eq(1) }
+ it { expect(output).to include('No such file or directory') }
+ end
+
+ context 'unsafe string command' do
+ it 'raises an error when it gets called with a string argument' do
+ expect { klass.new.popen('ls', path) }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'with custom options' do
+ let(:vars) { { 'foobar' => 123, 'PWD' => path } }
+ let(:options) { { chdir: path } }
+
+ it 'calls popen3 with the provided environment variables' do
+ expect(Open3).to receive(:popen3).with(vars, 'ls', options)
+
+ klass.new.popen(%w(ls), path, { 'foobar' => 123 })
+ end
+ end
+
+ context 'use stdin' do
+ let(:result) { klass.new.popen(%w[cat], path) { |stdin| stdin.write 'hello' } }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to be_zero }
+ it { expect(output).to eq('hello') }
+ end
+
+ context 'with lazy block' do
+ it 'yields a lazy io' do
+ expect_lazy_io = lambda do |io|
+ expect(io).to be_a Enumerator::Lazy
+ expect(io.inspect).to include('#<IO:fd')
+ end
+
+ klass.new.popen(%w[ls], path, lazy_block: expect_lazy_io)
+ end
+
+ it "doesn't wait for process exit" do
+ Timeout.timeout(2) do
+ klass.new.popen(%w[yes], path, lazy_block: ->(io) {})
+ end
+ end
+ end
+ end
+
+ context 'popen_with_timeout' do
+ let(:timeout) { 1.second }
+
+ context 'no timeout' do
+ context 'zero status' do
+ let(:result) { klass.new.popen_with_timeout(%w(ls), timeout, path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to be_zero }
+ it { expect(output).to include('tests') }
+ end
+
+ context 'non-zero status' do
+ let(:result) { klass.new.popen_with_timeout(%w(cat NOTHING), timeout, path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to eq(1) }
+ it { expect(output).to include('No such file or directory') }
+ end
+
+ context 'unsafe string command' do
+ it 'raises an error when it gets called with a string argument' do
+ expect { klass.new.popen_with_timeout('ls', timeout, path) }.to raise_error(RuntimeError)
+ end
+ end
+ end
+
+ context 'timeout' do
+ context 'timeout' do
+ it "raises a Timeout::Error" do
+ expect { klass.new.popen_with_timeout(%w(sleep 1000), timeout, path) }.to raise_error(Timeout::Error)
+ end
+
+ it "handles processes that do not shutdown correctly" do
+ expect { klass.new.popen_with_timeout(['bash', '-c', "trap -- '' SIGTERM; sleep 1000"], timeout, path) }.to raise_error(Timeout::Error)
+ end
+ end
+
+ context 'timeout period' do
+ let(:time_taken) do
+ begin
+ start = Time.now
+ klass.new.popen_with_timeout(%w(sleep 1000), timeout, path)
+ rescue
+ Time.now - start
+ end
+ end
+
+ it { expect(time_taken).to be >= timeout }
+ end
+
+ context 'clean up' do
+ let(:instance) { klass.new }
+
+ it 'kills the child process' do
+ expect(instance).to receive(:kill_process_group_for_pid).and_wrap_original do |m, *args|
+ # args.first is the PID, and it should not be running at this point
+ m.call(*args)
+
+ pid = args.first
+ begin
+ Process.getpgid(pid)
+ raise "The child process should have been killed"
+ rescue Errno::ESRCH
+ end
+ end
+
+ expect { instance.popen_with_timeout(['bash', '-c', "trap -- '' SIGTERM; sleep 1000"], timeout, path) }.to raise_error(Timeout::Error)
+ end
+ end
+ end
+ end
+end
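The clean-up example above relies on a kill_process_group_for_pid helper and then verifies via Process.getpgid that nothing is left running. A plausible shape for such a helper — assumed from the name and the Errno::ESRCH rescue in the spec, not taken from the real implementation — is:

    def kill_process_group_for_pid(pid)
      # A negative PID sends the signal to the entire process group.
      Process.kill('KILL', -pid)
    rescue Errno::ESRCH
      # The group is already gone; nothing left to clean up.
    end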
diff --git a/spec/lib/gitlab/git/remote_repository_spec.rb b/spec/lib/gitlab/git/remote_repository_spec.rb
new file mode 100644
index 00000000000..0506210887c
--- /dev/null
+++ b/spec/lib/gitlab/git/remote_repository_spec.rb
@@ -0,0 +1,99 @@
+require 'spec_helper'
+
+describe Gitlab::Git::RemoteRepository, seed_helper: true do
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
+ subject { described_class.new(repository) }
+
+ describe '#empty_repo?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:repository, :result) do
+ Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') | false
+ Gitlab::Git::Repository.new('default', 'does-not-exist.git', '') | true
+ end
+
+ with_them do
+ it { expect(subject.empty_repo?).to eq(result) }
+ end
+ end
+
+ describe '#commit_id' do
+ it 'returns an OID if the revision exists' do
+ expect(subject.commit_id('v1.0.0')).to eq('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9')
+ end
+
+ it 'is nil when the revision does not exist' do
+ expect(subject.commit_id('does-not-exist')).to be_nil
+ end
+ end
+
+ describe '#branch_exists?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:branch, :result) do
+ 'master' | true
+ 'does-not-exist' | false
+ end
+
+ with_them do
+ it { expect(subject.branch_exists?(branch)).to eq(result) }
+ end
+ end
+
+ describe '#same_repository?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:other_repository, :result) do
+ repository | true
+ Gitlab::Git::Repository.new(repository.storage, repository.relative_path, '') | true
+ Gitlab::Git::Repository.new('broken', TEST_REPO_PATH, '') | false
+ Gitlab::Git::Repository.new(repository.storage, 'wrong/relative-path.git', '') | false
+ Gitlab::Git::Repository.new('broken', 'wrong/relative-path.git', '') | false
+ end
+
+ with_them do
+ it { expect(subject.same_repository?(other_repository)).to eq(result) }
+ end
+ end
+
+ describe '#fetch_env' do
+ let(:remote_repository) { described_class.new(repository) }
+
+ let(:gitaly_client) { double(:gitaly_client) }
+ let(:address) { 'fake-address' }
+ let(:token) { 'fake-token' }
+
+ subject { remote_repository.fetch_env }
+
+ before do
+ allow(remote_repository).to receive(:gitaly_client).and_return(gitaly_client)
+
+ expect(gitaly_client).to receive(:address).with(repository.storage).and_return(address)
+ expect(gitaly_client).to receive(:token).with(repository.storage).and_return(token)
+ end
+
+ it { expect(subject).to be_a(Hash) }
+ it { expect(subject['GITALY_ADDRESS']).to eq(address) }
+ it { expect(subject['GITALY_TOKEN']).to eq(token) }
+ it { expect(subject['GITALY_WD']).to eq(Dir.pwd) }
+
+ it 'creates a plausible GIT_SSH_COMMAND' do
+ git_ssh_command = subject['GIT_SSH_COMMAND']
+
+ expect(git_ssh_command).to start_with('/')
+ expect(git_ssh_command).to end_with('/gitaly-ssh upload-pack')
+ end
+
+ it 'creates a plausible GITALY_PAYLOAD' do
+ req = Gitaly::SSHUploadPackRequest.decode_json(subject['GITALY_PAYLOAD'])
+
+ expect(remote_repository.gitaly_repository).to eq(req.repository)
+ end
+
+ context 'when the token is blank' do
+ let(:token) { '' }
+
+ it { expect(subject.keys).not_to include('GITALY_TOKEN') }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 5f12125beb2..2f49bd1bcf2 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -54,7 +54,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe "#rugged" do
- describe 'when storage is broken', broken_storage: true do
+ describe 'when storage is broken', :broken_storage do
it 'raises a storage exception when storage is not available' do
broken_repo = described_class.new('broken', 'a/path.git', '')
@@ -68,31 +68,52 @@ describe Gitlab::Git::Repository, seed_helper: true do
expect { broken_repo.rugged }.to raise_error(Gitlab::Git::Repository::NoRepository)
end
- context 'with no Git env stored' do
- before do
- expect(Gitlab::Git::Env).to receive(:all).and_return({})
- end
+ describe 'alternates keyword argument' do
+ context 'with no Git env stored' do
+ before do
+ allow(Gitlab::Git::Env).to receive(:all).and_return({})
+ end
- it "whitelist some variables and pass them via the alternates keyword argument" do
- expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: [])
+ it "is passed an empty array" do
+ expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: [])
- repository.rugged
+ repository.rugged
+ end
end
- end
- context 'with some Git env stored' do
- before do
- expect(Gitlab::Git::Env).to receive(:all).and_return({
- 'GIT_OBJECT_DIRECTORY' => 'foo',
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar',
- 'GIT_OTHER' => 'another_env'
- })
+ context 'with absolute and relative Git object dir envvars stored' do
+ before do
+ allow(Gitlab::Git::Env).to receive(:all).and_return({
+ 'GIT_OBJECT_DIRECTORY_RELATIVE' => './objects/foo',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['./objects/bar', './objects/baz'],
+ 'GIT_OBJECT_DIRECTORY' => 'ignored',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => %w[ignored ignored],
+ 'GIT_OTHER' => 'another_env'
+ })
+ end
+
+ it "is passed the relative object dir envvars after being converted to absolute ones" do
+ alternates = %w[foo bar baz].map { |d| File.join(repository.path, './objects', d) }
+ expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: alternates)
+
+ repository.rugged
+ end
end
- it "whitelist some variables and pass them via the alternates keyword argument" do
- expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: %w[foo bar])
+ context 'with only absolute Git object dir envvars stored' do
+ before do
+ allow(Gitlab::Git::Env).to receive(:all).and_return({
+ 'GIT_OBJECT_DIRECTORY' => 'foo',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => %w[bar baz],
+ 'GIT_OTHER' => 'another_env'
+ })
+ end
+
+ it "is passed the absolute object dir envvars as is" do
+ expect(Rugged::Repository).to receive(:new).with(repository.path, alternates: %w[foo bar baz])
- repository.rugged
+ repository.rugged
+ end
end
end
end
@@ -384,7 +405,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- context 'when Gitaly commit_count feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly commit_count feature is disabled', :skip_gitaly_mock do
it_behaves_like 'simple commit counting'
end
end
@@ -418,7 +439,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'check for local branches'
end
- context 'without gitaly', skip_gitaly_mock: true do
+ context 'without gitaly', :skip_gitaly_mock do
it_behaves_like 'check for local branches'
end
end
@@ -428,7 +449,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -453,7 +473,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like "deleting a branch"
end
- context "when Gitaly delete_branch is disabled", skip_gitaly_mock: true do
+ context "when Gitaly delete_branch is disabled", :skip_gitaly_mock do
it_behaves_like "deleting a branch"
end
end
@@ -463,7 +483,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -489,7 +508,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'creating a branch'
end
- context 'when Gitaly create_branch feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly create_branch feature is disabled', :skip_gitaly_mock do
it_behaves_like 'creating a branch'
end
end
@@ -523,7 +542,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
end
@@ -538,10 +556,10 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- describe "#remote_delete" do
+ describe "#remove_remote" do
before(:all) do
@repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
- @repo.remote_delete("expendable")
+ @repo.remove_remote("expendable")
end
it "should remove the remote" do
@@ -549,42 +567,107 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
end
- describe "#remote_add" do
+ describe "#remote_update" do
before(:all) do
@repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
- @repo.remote_add("new_remote", SeedHelper::GITLAB_GIT_TEST_REPO_URL)
+ @repo.remote_update("expendable", url: TEST_NORMAL_REPO_PATH)
end
it "should add the remote" do
- expect(@repo.rugged.remotes.each_name.to_a).to include("new_remote")
+ expect(@repo.rugged.remotes["expendable"].url).to(
+ eq(TEST_NORMAL_REPO_PATH)
+ )
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
end
- describe "#remote_update" do
+ describe '#fetch_as_mirror_without_shell' do
+ let(:new_repository) do
+ Gitlab::Git::Repository.new('default', 'my_project.git', '')
+ end
+
+ subject { new_repository.fetch_as_mirror_without_shell(repository.path) }
+
+ before do
+ Gitlab::Shell.new.add_repository('default', 'my_project')
+ end
+
+ after do
+ Gitlab::Shell.new.remove_repository(TestEnv.repos_path, 'my_project')
+ end
+
+ it 'fetches a url as a mirror remote' do
+ subject
+
+ expect(refs(new_repository.path)).to eq(refs(repository.path))
+ end
+
+ context 'with keep-around refs' do
+ let(:sha) { SeedRepo::Commit::ID }
+ let(:keep_around_ref) { "refs/keep-around/#{sha}" }
+ let(:tmp_ref) { "refs/tmp/#{SecureRandom.hex}" }
+
+ before do
+ repository.rugged.references.create(keep_around_ref, sha, force: true)
+ repository.rugged.references.create(tmp_ref, sha, force: true)
+ end
+
+ it 'includes the temporary and keep-around refs' do
+ subject
+
+ expect(refs(new_repository.path)).to include(keep_around_ref)
+ expect(refs(new_repository.path)).to include(tmp_ref)
+ end
+ end
+ end
+
+ describe '#remote_tags' do
+ let(:remote_name) { 'upstream' }
+ let(:target_commit_id) { SeedRepo::Commit::ID }
+ let(:user) { create(:user) }
+ let(:tag_name) { 'v0.0.1' }
+ let(:tag_message) { 'My tag' }
+ let(:remote_repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
+
+ subject { repository.remote_tags(remote_name) }
+
+ before do
+ repository.add_remote(remote_name, remote_repository.path)
+ remote_repository.add_tag(tag_name, user: user, target: target_commit_id)
+ end
+
+ after do
+ ensure_seeds
+ end
+
+ it 'gets the remote tags' do
+ expect(subject.first).to be_an_instance_of(Gitlab::Git::Tag)
+ expect(subject.first.name).to eq(tag_name)
+ expect(subject.first.dereferenced_target.id).to eq(target_commit_id)
+ end
+ end
+
+ describe '#remote_exists?' do
before(:all) do
@repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
- @repo.remote_update("expendable", url: TEST_NORMAL_REPO_PATH)
+ @repo.add_remote("new_remote", SeedHelper::GITLAB_GIT_TEST_REPO_URL)
end
- it "should add the remote" do
- expect(@repo.rugged.remotes["expendable"].url).to(
- eq(TEST_NORMAL_REPO_PATH)
- )
+ it 'returns true for an existing remote' do
+ expect(@repo.remote_exists?('new_remote')).to eq(true)
end
- after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
- ensure_seeds
+ it 'returns false for a non-existing remote' do
+ expect(@repo.remote_exists?('foo')).to eq(false)
end
end
@@ -929,7 +1012,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'extended commit counting'
end
- context 'when Gitaly count_commits feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly count_commits feature is disabled', :skip_gitaly_mock do
it_behaves_like 'extended commit counting'
end
end
@@ -996,7 +1079,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'finding a branch'
end
- context 'when Gitaly find_branch feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly find_branch feature is disabled', :skip_gitaly_mock do
it_behaves_like 'finding a branch'
it 'should reload Rugged::Repository and return master' do
@@ -1047,7 +1130,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1094,7 +1176,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1114,8 +1195,44 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
+ describe '#merged_branch_names' do
+ context 'when branch names are passed' do
+ it 'only returns the names we are asking for' do
+ names = repository.merged_branch_names(%w[merge-test])
+
+ expect(names).to contain_exactly('merge-test')
+ end
+
+ it 'does not return unmerged branch names' do
+ names = repository.merged_branch_names(%w[feature])
+
+ expect(names).to be_empty
+ end
+ end
+
+ context 'when no branch names are specified' do
+ before do
+ repository.create_branch('identical', 'master')
+ end
+
+ after do
+ ensure_seeds
+ end
+
+ it 'returns all merged branch names except for the identical one' do
+ names = repository.merged_branch_names
+
+ expect(names).to include('merge-test')
+ expect(names).to include('fix-mode')
+ expect(names).not_to include('feature')
+ expect(names).not_to include('identical')
+ end
+ end
+ end
+
describe "#ls_files" do
let(:master_file_paths) { repository.ls_files("master") }
+ let(:utf8_file_paths) { repository.ls_files("ls-files-utf8") }
let(:not_existed_branch) { repository.ls_files("not_existed_branch") }
it "read every file paths of master branch" do
@@ -1137,6 +1254,10 @@ describe Gitlab::Git::Repository, seed_helper: true do
it "returns empty array when not existed branch" do
expect(not_existed_branch.length).to equal(0)
end
+
+ it "returns valid utf-8 data" do
+ expect(utf8_file_paths.map { |file| file.force_encoding('utf-8') }).to all(be_valid_encoding)
+ end
end
describe "#copy_gitattributes" do
@@ -1238,7 +1359,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'checks the existence of refs'
end
- context 'when Gitaly ref_exists feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly ref_exists feature is disabled', :skip_gitaly_mock do
it_behaves_like 'checks the existence of refs'
end
end
@@ -1260,7 +1381,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'checks the existence of tags'
end
- context 'when Gitaly ref_exists_tags feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly ref_exists_tags feature is disabled', :skip_gitaly_mock do
it_behaves_like 'checks the existence of tags'
end
end
@@ -1284,18 +1405,35 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'checks the existence of branches'
end
- context 'when Gitaly ref_exists_branches feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly ref_exists_branches feature is disabled', :skip_gitaly_mock do
it_behaves_like 'checks the existence of branches'
end
end
+ describe '#batch_existence' do
+ let(:refs) { ['deadbeef', SeedRepo::RubyBlob::ID, '909e6157199'] }
+
+ it 'returns existing refs back' do
+ result = repository.batch_existence(refs)
+
+ expect(result).to eq([SeedRepo::RubyBlob::ID])
+ end
+
+ context 'existing: false' do
+ it 'inverts meaning and returns non-existing refs' do
+ result = repository.batch_existence(refs, existing: false)
+
+ expect(result).to eq(%w(deadbeef 909e6157199))
+ end
+ end
+ end
+
describe '#local_branches' do
before(:all) do
@repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
end
after(:all) do
- FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
ensure_seeds
end
@@ -1361,7 +1499,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like 'languages'
- context 'with rugged', skip_gitaly_mock: true do
+ context 'with rugged', :skip_gitaly_mock do
it_behaves_like 'languages'
end
end
@@ -1412,36 +1550,61 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
- describe '#fetch_source_branch' do
- let(:local_ref) { 'refs/merge-requests/1/head' }
+ describe '#fetch_source_branch!' do
+ shared_examples '#fetch_source_branch!' do
+ let(:local_ref) { 'refs/merge-requests/1/head' }
+ let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
+ let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
- context 'when the branch exists' do
- let(:source_branch) { 'master' }
+ after do
+ ensure_seeds
+ end
- it 'writes the ref' do
- expect(repository).to receive(:write_ref).with(local_ref, /\h{40}/)
+ context 'when the branch exists' do
+ context 'when the commit does not exist locally' do
+ let(:source_branch) { 'new-branch-for-fetch-source-branch' }
+ let(:source_rugged) { source_repository.rugged }
+ let(:new_oid) { new_commit_edit_old_file(source_rugged).oid }
- repository.fetch_source_branch(repository, source_branch, local_ref)
- end
+ before do
+ source_rugged.branches.create(source_branch, new_oid)
+ end
- it 'returns true' do
- expect(repository.fetch_source_branch(repository, source_branch, local_ref)).to eq(true)
- end
- end
+ it 'writes the ref' do
+ expect(repository.fetch_source_branch!(source_repository, source_branch, local_ref)).to eq(true)
+ expect(repository.commit(local_ref).sha).to eq(new_oid)
+ end
+ end
- context 'when the branch does not exist' do
- let(:source_branch) { 'definitely-not-master' }
+ context 'when the commit exists locally' do
+ let(:source_branch) { 'master' }
+ let(:expected_oid) { SeedRepo::LastCommit::ID }
- it 'does not write the ref' do
- expect(repository).not_to receive(:write_ref)
+ it 'writes the ref' do
+ # Sanity check: the commit should already exist
+ expect(repository.commit(expected_oid)).not_to be_nil
- repository.fetch_source_branch(repository, source_branch, local_ref)
+ expect(repository.fetch_source_branch!(source_repository, source_branch, local_ref)).to eq(true)
+ expect(repository.commit(local_ref).sha).to eq(expected_oid)
+ end
+ end
end
- it 'returns false' do
- expect(repository.fetch_source_branch(repository, source_branch, local_ref)).to eq(false)
+ context 'when the branch does not exist' do
+ let(:source_branch) { 'definitely-not-master' }
+
+ it 'does not write the ref' do
+ expect(repository.fetch_source_branch!(source_repository, source_branch, local_ref)).to eq(false)
+ expect(repository.commit(local_ref)).to be_nil
+ end
end
end
+
+ it_behaves_like '#fetch_source_branch!'
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#fetch_source_branch!'
+ end
end
describe '#rm_branch' do
@@ -1467,7 +1630,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
it_behaves_like "user deleting a branch"
end
- context "when Gitaly user_delete_branch is disabled", skip_gitaly_mock: true do
+ context "when Gitaly user_delete_branch is disabled", :skip_gitaly_mock do
it_behaves_like "user deleting a branch"
end
end
@@ -1489,6 +1652,157 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
+ describe '#fetch_remote_without_shell' do
+ let(:git_path) { Gitlab.config.git.bin_path }
+ let(:remote_name) { 'my_remote' }
+
+ subject { repository.fetch_remote_without_shell(remote_name) }
+
+ it 'fetches the remote and returns true if the command was successful' do
+ expect(repository).to receive(:popen)
+ .with(%W(#{git_path} fetch #{remote_name}), repository.path, {})
+ .and_return(['', 0])
+
+ expect(subject).to be(true)
+ end
+ end
+
+ describe '#merge' do
+ let(:repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
+ let(:source_sha) { '913c66a37b4a45b9769037c55c2d238bd0942d2e' }
+ let(:user) { build(:user) }
+ let(:target_branch) { 'test-merge-target-branch' }
+
+ before do
+ repository.create_branch(target_branch, '6d394385cf567f80a8fd85055db1ab4c5295806f')
+ end
+
+ after do
+ ensure_seeds
+ end
+
+ shared_examples '#merge' do
+ it 'can perform a merge' do
+ merge_commit_id = nil
+ result = repository.merge(user, source_sha, target_branch, 'Test merge') do |commit_id|
+ merge_commit_id = commit_id
+ end
+
+ expect(result.newrev).to eq(merge_commit_id)
+ expect(result.repo_created).to eq(false)
+ expect(result.branch_created).to eq(false)
+ end
+ end
+
+ context 'with gitaly' do
+ it_behaves_like '#merge'
+ end
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#merge'
+ end
+ end
+
+ describe '#ff_merge' do
+ let(:repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
+ let(:branch_head) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
+ let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
+ let(:user) { build(:user) }
+ let(:target_branch) { 'test-ff-target-branch' }
+
+ before do
+ repository.create_branch(target_branch, branch_head)
+ end
+
+ after do
+ ensure_seeds
+ end
+
+ subject { repository.ff_merge(user, source_sha, target_branch) }
+
+ shared_examples '#ff_merge' do
+ it 'performs a ff_merge' do
+ expect(subject.newrev).to eq(source_sha)
+ expect(subject.repo_created).to be(false)
+ expect(subject.branch_created).to be(false)
+
+ expect(repository.commit(target_branch).id).to eq(source_sha)
+ end
+
+ context 'with a non-existing target branch' do
+ subject { repository.ff_merge(user, source_sha, 'this-isnt-real') }
+
+ it 'throws an ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'with a non-existing source commit' do
+ let(:source_sha) { 'f001' }
+
+      it 'raises an ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the source sha is not a descendant of the branch head' do
+ let(:source_sha) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' }
+
+ it "doesn't perform the ff_merge" do
+ expect { subject }.to raise_error(Gitlab::Git::CommitError)
+
+ expect(repository.commit(target_branch).id).to eq(branch_head)
+ end
+ end
+ end
+
+ context 'with gitaly' do
+ it "calls Gitaly's OperationService" do
+ expect_any_instance_of(Gitlab::GitalyClient::OperationService)
+ .to receive(:user_ff_branch).with(user, source_sha, target_branch)
+ .and_return(nil)
+
+ subject
+ end
+
+ it_behaves_like '#ff_merge'
+ end
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#ff_merge'
+ end
+ end
+
+ describe '#delete_all_refs_except' do
+ let(:repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
+
+ before do
+ repository.write_ref("refs/delete/a", "0b4bc9a49b562e85de7cc9e834518ea6828729b9")
+ repository.write_ref("refs/also-delete/b", "12d65c8dd2b2676fa3ac47d955accc085a37a9c1")
+ repository.write_ref("refs/keep/c", "6473c90867124755509e100d0d35ebdc85a0b6ae")
+ repository.write_ref("refs/also-keep/d", "0b4bc9a49b562e85de7cc9e834518ea6828729b9")
+ end
+
+ after do
+ ensure_seeds
+ end
+
+ it 'deletes all refs except those with the specified prefixes' do
+ repository.delete_all_refs_except(%w(refs/keep refs/also-keep refs/heads))
+ expect(repository.ref_exists?("refs/delete/a")).to be(false)
+ expect(repository.ref_exists?("refs/also-delete/b")).to be(false)
+ expect(repository.ref_exists?("refs/keep/c")).to be(true)
+ expect(repository.ref_exists?("refs/also-keep/d")).to be(true)
+ expect(repository.ref_exists?("refs/heads/master")).to be(true)
+ end
+ end
+
def create_remote_branch(repository, remote_name, branch_name, source_branch_name)
source_branch = repository.branches.find { |branch| branch.name == source_branch_name }
rugged = repository.rugged
@@ -1564,4 +1878,10 @@ describe Gitlab::Git::Repository, seed_helper: true do
sha = Rugged::Commit.create(repo, options)
repo.lookup(sha)
end
+
+ def refs(dir)
+ IO.popen(%W[git -C #{dir} for-each-ref], &:read).split("\n").map do |line|
+ line.split("\t").last
+ end
+ end
end
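# A rough, standalone sketch of the prefix filtering that #delete_all_refs_except is
# expected to perform, reusing the `git for-each-ref` approach of the `refs` helper
# defined above. The method name `refs_to_delete` and the `dir` argument are
# illustrative only, not part of the committed spec.
def refs_to_delete(dir, keep_prefixes)
  all_refs = IO.popen(%W[git -C #{dir} for-each-ref --format=%(refname)], &:read).split("\n")

  all_refs.reject { |ref| keep_prefixes.any? { |prefix| ref.start_with?(prefix) } }
end

# refs_to_delete(TEST_MUTABLE_REPO_PATH, %w(refs/keep refs/also-keep refs/heads))
# => ["refs/delete/a", "refs/also-delete/b"]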
diff --git a/spec/lib/gitlab/git/rev_list_spec.rb b/spec/lib/gitlab/git/rev_list_spec.rb
index c0eac98d718..eaf74951b0e 100644
--- a/spec/lib/gitlab/git/rev_list_spec.rb
+++ b/spec/lib/gitlab/git/rev_list_spec.rb
@@ -2,53 +2,112 @@ require 'spec_helper'
describe Gitlab::Git::RevList do
let(:project) { create(:project, :repository) }
+ let(:rev_list) { described_class.new(newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
+ let(:env_hash) do
+ {
+ 'GIT_OBJECT_DIRECTORY' => 'foo',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
+ }
+ end
before do
- expect(Gitlab::Git::Env).to receive(:all).and_return({
- GIT_OBJECT_DIRECTORY: 'foo',
- GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar'
- })
+ allow(Gitlab::Git::Env).to receive(:all).and_return(env_hash.symbolize_keys)
end
- context "#new_refs" do
- let(:rev_list) { described_class.new(newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
+ def args_for_popen(args_list)
+ [
+ Gitlab.config.git.bin_path,
+ "--git-dir=#{project.repository.path_to_repo}",
+ 'rev-list',
+ *args_list
+ ]
+ end
- it 'calls out to `popen`' do
- expect(rev_list).to receive(:popen).with([
- Gitlab.config.git.bin_path,
- "--git-dir=#{project.repository.path_to_repo}",
- 'rev-list',
- 'newrev',
- '--not',
- '--all'
- ],
+ def stub_popen_rev_list(*additional_args, output:)
+ args = args_for_popen(additional_args)
+
+ expect(rev_list).to receive(:popen).with(args, nil, env_hash)
+ .and_return([output, 0])
+ end
+
+ def stub_lazy_popen_rev_list(*additional_args, output:)
+ params = [
+ args_for_popen(additional_args),
nil,
- {
- 'GIT_OBJECT_DIRECTORY' => 'foo',
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
- }).and_return(["sha1\nsha2", 0])
+ env_hash,
+ hash_including(lazy_block: anything)
+ ]
+
+ expect(rev_list).to receive(:popen).with(*params) do |*_, lazy_block:|
+ lazy_block.call(output.split("\n").lazy)
+ end
+ end
+
+ context "#new_refs" do
+ it 'calls out to `popen`' do
+ stub_popen_rev_list('newrev', '--not', '--all', output: "sha1\nsha2")
expect(rev_list.new_refs).to eq(%w[sha1 sha2])
end
end
+ context '#new_objects' do
+ it 'fetches list of newly pushed objects using rev-list' do
+ stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2")
+
+ expect(rev_list.new_objects).to eq(%w[sha1 sha2])
+ end
+
+ it 'can skip pathless objects' do
+ stub_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2 path/to/file")
+
+ expect(rev_list.new_objects(require_path: true)).to eq(%w[sha2])
+ end
+
+ it 'can yield a lazy enumerator' do
+ stub_lazy_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2")
+
+ rev_list.new_objects do |object_ids|
+ expect(object_ids).to be_a Enumerator::Lazy
+ end
+ end
+
+ it 'returns the result of the block when given' do
+ stub_lazy_popen_rev_list('newrev', '--not', '--all', '--objects', output: "sha1\nsha2")
+
+ objects = rev_list.new_objects do |object_ids|
+ object_ids.first
+ end
+
+ expect(objects).to eq 'sha1'
+ end
+
+    it 'can accept a list of references to exclude' do
+ stub_popen_rev_list('newrev', '--not', 'master', '--objects', output: "sha1\nsha2")
+
+ expect(rev_list.new_objects(not_in: ['master'])).to eq(%w[sha1 sha2])
+ end
+
+    it 'handles an empty list of references to exclude by listing all known objects' do
+ stub_popen_rev_list('newrev', '--objects', output: "sha1\nsha2")
+
+ expect(rev_list.new_objects(not_in: [])).to eq(%w[sha1 sha2])
+ end
+ end
+
+ context '#all_objects' do
+ it 'fetches list of all pushed objects using rev-list' do
+ stub_popen_rev_list('--all', '--objects', output: "sha1\nsha2")
+
+ expect(rev_list.all_objects).to eq(%w[sha1 sha2])
+ end
+ end
+
context "#missed_ref" do
let(:rev_list) { described_class.new(oldrev: 'oldrev', newrev: 'newrev', path_to_repo: project.repository.path_to_repo) }
it 'calls out to `popen`' do
- expect(rev_list).to receive(:popen).with([
- Gitlab.config.git.bin_path,
- "--git-dir=#{project.repository.path_to_repo}",
- 'rev-list',
- '--max-count=1',
- 'oldrev',
- '^newrev'
- ],
- nil,
- {
- 'GIT_OBJECT_DIRECTORY' => 'foo',
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
- }).and_return(["sha1\nsha2", 0])
+ stub_popen_rev_list('--max-count=1', 'oldrev', '^newrev', output: "sha1\nsha2")
expect(rev_list.missed_ref).to eq(%w[sha1 sha2])
end
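# The stubs above emulate shelling out to `git rev-list`. A standalone sketch of the
# equivalent #new_objects invocation; the method name `new_object_ids` and the
# `git_dir` argument are illustrative, and the real class runs the same arguments
# through `popen` together with the GIT_OBJECT_DIRECTORY environment shown above.
def new_object_ids(git_dir, newrev, not_in: ['--all'])
  args = ['git', "--git-dir=#{git_dir}", 'rev-list', newrev]
  args += ['--not', *not_in] unless not_in.empty?
  args << '--objects'

  # Each output line is "<sha>" or "<sha> <path>"; keep only the object id.
  IO.popen(args, &:read).split("\n").map { |line| line.split(' ').first }
end

# new_object_ids(repo_path, 'newrev')                     # exclude all existing refs
# new_object_ids(repo_path, 'newrev', not_in: ['master']) # exclude only master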
diff --git a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
index 98cf7966dad..72dabca793a 100644
--- a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
@@ -10,18 +10,10 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
# Override test-settings for the circuitbreaker with something more realistic
# for these specs.
stub_storage_settings('default' => {
- 'path' => TestEnv.repos_path,
- 'failure_count_threshold' => 10,
- 'failure_wait_time' => 30,
- 'failure_reset_time' => 1800,
- 'storage_timeout' => 5
+ 'path' => TestEnv.repos_path
},
'broken' => {
- 'path' => 'tmp/tests/non-existent-repositories',
- 'failure_count_threshold' => 10,
- 'failure_wait_time' => 30,
- 'failure_reset_time' => 1800,
- 'storage_timeout' => 5
+ 'path' => 'tmp/tests/non-existent-repositories'
},
'nopath' => { 'path' => nil }
)
@@ -49,6 +41,10 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(key_exists).to be_falsey
end
+
+ it 'does not break when there are no keys in redis' do
+ expect { described_class.reset_all! }.not_to raise_error
+ end
end
describe '.for_storage' do
@@ -75,19 +71,79 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(circuit_breaker.hostname).to eq(hostname)
expect(circuit_breaker.storage).to eq('default')
expect(circuit_breaker.storage_path).to eq(TestEnv.repos_path)
- expect(circuit_breaker.failure_count_threshold).to eq(10)
- expect(circuit_breaker.failure_wait_time).to eq(30)
- expect(circuit_breaker.failure_reset_time).to eq(1800)
- expect(circuit_breaker.storage_timeout).to eq(5)
+ end
+ end
+
+ context 'circuitbreaker settings' do
+ before do
+ stub_application_setting(circuitbreaker_failure_count_threshold: 0,
+ circuitbreaker_failure_wait_time: 1,
+ circuitbreaker_failure_reset_time: 2,
+ circuitbreaker_storage_timeout: 3,
+ circuitbreaker_access_retries: 4,
+ circuitbreaker_backoff_threshold: 5)
+ end
+
+ describe '#failure_count_threshold' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.failure_count_threshold).to eq(0)
+ end
+ end
+
+ describe '#failure_wait_time' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.failure_wait_time).to eq(1)
+ end
+ end
+
+ describe '#failure_reset_time' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.failure_reset_time).to eq(2)
+ end
+ end
+
+ describe '#storage_timeout' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.storage_timeout).to eq(3)
+ end
+ end
+
+ describe '#access_retries' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.access_retries).to eq(4)
+ end
+ end
+
+ describe '#backoff_threshold' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.backoff_threshold).to eq(5)
+ end
end
end
describe '#perform' do
- it 'raises an exception with retry time when the circuit is open' do
- allow(circuit_breaker).to receive(:circuit_broken?).and_return(true)
+ it 'raises the correct exception when the circuit is open' do
+ set_in_redis(:last_failure, 1.day.ago.to_f)
+ set_in_redis(:failure_count, 999)
expect { |b| circuit_breaker.perform(&b) }
- .to raise_error(Gitlab::Git::Storage::CircuitOpen)
+ .to raise_error do |exception|
+ expect(exception).to be_kind_of(Gitlab::Git::Storage::CircuitOpen)
+ expect(exception.retry_after).to eq(1800)
+ end
+ end
+
+ it 'raises the correct exception when backing off' do
+ Timecop.freeze do
+ set_in_redis(:last_failure, 1.second.ago.to_f)
+ set_in_redis(:failure_count, 90)
+
+ expect { |b| circuit_breaker.perform(&b) }
+ .to raise_error do |exception|
+ expect(exception).to be_kind_of(Gitlab::Git::Storage::Failing)
+ expect(exception.retry_after).to eq(30)
+ end
+ end
end
it 'yields the block' do
@@ -97,6 +153,7 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
it 'checks if the storage is available' do
expect(circuit_breaker).to receive(:check_storage_accessible!)
+ .and_call_original
circuit_breaker.perform { 'hello world' }
end
@@ -112,204 +169,124 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
.to raise_error(Rugged::OSError)
end
- context 'with the feature disabled' do
- it 'returns the block without checking accessibility' do
- stub_feature_flags(git_storage_circuit_breaker: false)
-
- expect(circuit_breaker).not_to receive(:circuit_broken?)
+ it 'tracks that the storage was accessible' do
+ set_in_redis(:failure_count, 10)
+ set_in_redis(:last_failure, Time.now.to_f)
- result = circuit_breaker.perform { 'hello' }
+ circuit_breaker.perform { '' }
- expect(result).to eq('hello')
- end
+ expect(value_from_redis(:failure_count).to_i).to eq(0)
+ expect(value_from_redis(:last_failure)).to be_empty
+ expect(circuit_breaker.failure_count).to eq(0)
+ expect(circuit_breaker.last_failure).to be_nil
end
- end
- describe '#circuit_broken?' do
- it 'is working when there is no last failure' do
- set_in_redis(:last_failure, nil)
- set_in_redis(:failure_count, 0)
+ it 'only performs the accessibility check once' do
+ expect(Gitlab::Git::Storage::ForkedStorageCheck)
+ .to receive(:storage_available?).once.and_call_original
- expect(circuit_breaker.circuit_broken?).to be_falsey
+ 2.times { circuit_breaker.perform { '' } }
end
- it 'is broken when there was a recent failure' do
- Timecop.freeze do
- set_in_redis(:last_failure, 1.second.ago.to_f)
- set_in_redis(:failure_count, 1)
-
- expect(circuit_breaker.circuit_broken?).to be_truthy
- end
- end
+ it 'calls the check with the correct arguments' do
+ stub_application_setting(circuitbreaker_storage_timeout: 30,
+ circuitbreaker_access_retries: 3)
- it 'is broken when there are too many failures' do
- set_in_redis(:last_failure, 1.day.ago.to_f)
- set_in_redis(:failure_count, 200)
+ expect(Gitlab::Git::Storage::ForkedStorageCheck)
+ .to receive(:storage_available?).with(TestEnv.repos_path, 30, 3)
+ .and_call_original
- expect(circuit_breaker.circuit_broken?).to be_truthy
+ circuit_breaker.perform { '' }
end
- context 'the `failure_wait_time` is set to 0' do
+ context 'with the feature disabled' do
before do
- stub_storage_settings('default' => {
- 'failure_wait_time' => 0,
- 'path' => TestEnv.repos_path
- })
- end
-
- it 'is working even when there is a recent failure' do
- Timecop.freeze do
- set_in_redis(:last_failure, 0.seconds.ago.to_f)
- set_in_redis(:failure_count, 1)
-
- expect(circuit_breaker.circuit_broken?).to be_falsey
- end
+ stub_feature_flags(git_storage_circuit_breaker: false)
end
- end
- end
-
- describe "storage_available?" do
- context 'the storage is available' do
- it 'tracks that the storage was accessible an raises the error' do
- expect(circuit_breaker).to receive(:track_storage_accessible)
- circuit_breaker.storage_available?
- end
+ it 'returns the block without checking accessibility' do
+ expect(circuit_breaker).not_to receive(:check_storage_accessible!)
- it 'only performs the check once' do
- expect(Gitlab::Git::Storage::ForkedStorageCheck)
- .to receive(:storage_available?).once.and_call_original
+ result = circuit_breaker.perform { 'hello' }
- 2.times { circuit_breaker.storage_available? }
+ expect(result).to eq('hello')
end
- end
-
- context 'storage is not available' do
- let(:storage_name) { 'broken' }
- it 'tracks that the storage was inaccessible' do
- expect(circuit_breaker).to receive(:track_storage_inaccessible)
+ it 'allows enabling the feature using an ENV var' do
+ stub_env('GIT_STORAGE_CIRCUIT_BREAKER', 'true')
+ expect(circuit_breaker).to receive(:check_storage_accessible!)
- circuit_breaker.storage_available?
- end
- end
- end
-
- describe '#check_storage_accessible!' do
- it 'raises an exception with retry time when the circuit is open' do
- allow(circuit_breaker).to receive(:circuit_broken?).and_return(true)
+ result = circuit_breaker.perform { 'hello' }
- expect { circuit_breaker.check_storage_accessible! }
- .to raise_error do |exception|
- expect(exception).to be_kind_of(Gitlab::Git::Storage::CircuitOpen)
- expect(exception.retry_after).to eq(30)
+ expect(result).to eq('hello')
end
end
context 'the storage is not available' do
let(:storage_name) { 'broken' }
- it 'raises an error' do
+ it 'raises the correct exception' do
expect(circuit_breaker).to receive(:track_storage_inaccessible)
- expect { circuit_breaker.check_storage_accessible! }
+ expect { circuit_breaker.perform { '' } }
.to raise_error do |exception|
expect(exception).to be_kind_of(Gitlab::Git::Storage::Inaccessible)
expect(exception.retry_after).to eq(30)
end
end
- end
- end
-
- describe '#track_storage_inaccessible' do
- around do |example|
- Timecop.freeze { example.run }
- end
- it 'records the failure time in redis' do
- circuit_breaker.track_storage_inaccessible
-
- failure_time = value_from_redis(:last_failure)
-
- expect(Time.at(failure_time.to_i)).to be_within(1.second).of(Time.now)
- end
-
- it 'sets the failure time on the breaker without reloading' do
- circuit_breaker.track_storage_inaccessible
-
- expect(circuit_breaker).not_to receive(:get_failure_info)
- expect(circuit_breaker.last_failure).to eq(Time.now)
- end
-
- it 'increments the failure count in redis' do
- set_in_redis(:failure_count, 10)
-
- circuit_breaker.track_storage_inaccessible
-
- expect(value_from_redis(:failure_count).to_i).to be(11)
- end
-
- it 'increments the failure count on the breaker without reloading' do
- set_in_redis(:failure_count, 10)
-
- circuit_breaker.track_storage_inaccessible
+ it 'tracks that the storage was inaccessible' do
+ Timecop.freeze do
+ expect { circuit_breaker.perform { '' } }.to raise_error(Gitlab::Git::Storage::Inaccessible)
- expect(circuit_breaker).not_to receive(:get_failure_info)
- expect(circuit_breaker.failure_count).to eq(11)
+ expect(value_from_redis(:failure_count).to_i).to eq(1)
+ expect(value_from_redis(:last_failure)).not_to be_empty
+ expect(circuit_breaker.failure_count).to eq(1)
+ expect(circuit_breaker.last_failure).to be_within(1.second).of(Time.now)
+ end
+ end
end
end
- describe '#track_storage_accessible' do
- it 'sets the failure count to zero in redis' do
- set_in_redis(:failure_count, 10)
-
- circuit_breaker.track_storage_accessible
-
- expect(value_from_redis(:failure_count).to_i).to be(0)
- end
-
- it 'sets the failure count to zero on the breaker without reloading' do
- set_in_redis(:failure_count, 10)
-
- circuit_breaker.track_storage_accessible
+ describe '#circuit_broken?' do
+ it 'is working when there is no last failure' do
+ set_in_redis(:last_failure, nil)
+ set_in_redis(:failure_count, 0)
- expect(circuit_breaker).not_to receive(:get_failure_info)
- expect(circuit_breaker.failure_count).to eq(0)
+ expect(circuit_breaker.circuit_broken?).to be_falsey
end
- it 'removes the last failure time from redis' do
- set_in_redis(:last_failure, Time.now.to_i)
-
- circuit_breaker.track_storage_accessible
+ it 'is broken when there are too many failures' do
+ set_in_redis(:last_failure, 1.day.ago.to_f)
+ set_in_redis(:failure_count, 200)
- expect(circuit_breaker).not_to receive(:get_failure_info)
- expect(circuit_breaker.last_failure).to be_nil
+ expect(circuit_breaker.circuit_broken?).to be_truthy
end
+ end
- it 'removes the last failure time from the breaker without reloading' do
- set_in_redis(:last_failure, Time.now.to_i)
-
- circuit_breaker.track_storage_accessible
+ describe '#backing_off?' do
+ it 'is true when there was a recent failure' do
+ Timecop.freeze do
+ set_in_redis(:last_failure, 1.second.ago.to_f)
+ set_in_redis(:failure_count, 90)
- expect(value_from_redis(:last_failure)).to be_empty
+ expect(circuit_breaker.backing_off?).to be_truthy
+ end
end
- it 'wont connect to redis when there are no failures' do
- expect(Gitlab::Git::Storage.redis).to receive(:with).once
- .and_call_original
- expect(circuit_breaker).to receive(:track_storage_accessible)
- .and_call_original
-
- circuit_breaker.track_storage_accessible
- end
- end
+ context 'the `failure_wait_time` is set to 0' do
+ before do
+ stub_application_setting(circuitbreaker_failure_wait_time: 0)
+ end
- describe '#no_failures?' do
- it 'is false when a failure was tracked' do
- set_in_redis(:last_failure, Time.now.to_i)
- set_in_redis(:failure_count, 1)
+ it 'is working even when there are failures' do
+ Timecop.freeze do
+ set_in_redis(:last_failure, 0.seconds.ago.to_f)
+ set_in_redis(:failure_count, 90)
- expect(circuit_breaker.no_failures?).to be_falsey
+ expect(circuit_breaker.backing_off?).to be_falsey
+ end
+ end
end
end
@@ -329,10 +306,4 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(circuit_breaker.failure_count).to eq(7)
end
end
-
- describe '#cache_key' do
- it 'includes storage and host' do
- expect(circuit_breaker.cache_key).to eq(cache_key)
- end
- end
end
diff --git a/spec/lib/gitlab/git/storage/forked_storage_check_spec.rb b/spec/lib/gitlab/git/storage/forked_storage_check_spec.rb
index c708b15853a..39a5d020bb4 100644
--- a/spec/lib/gitlab/git/storage/forked_storage_check_spec.rb
+++ b/spec/lib/gitlab/git/storage/forked_storage_check_spec.rb
@@ -33,6 +33,21 @@ describe Gitlab::Git::Storage::ForkedStorageCheck, broken_storage: true, skip_da
expect(runtime).to be < 1.0
end
+  it 'tries the specified number of times before failing' do
+ allow(described_class).to receive(:check_filesystem_in_process) do
+ Process.spawn("sleep 10")
+ end
+
+ expect(Process).to receive(:spawn).with('sleep 10').twice
+ .and_call_original
+
+ runtime = Benchmark.realtime do
+ described_class.storage_available?(existing_path, 0.5, 2)
+ end
+
+ expect(runtime).to be < 1.0
+ end
+
describe 'when using paths with spaces' do
let(:test_dir) { Rails.root.join('tmp', 'tests', 'storage_check') }
let(:path_with_spaces) { File.join(test_dir, 'path with spaces') }
diff --git a/spec/lib/gitlab/git/storage/health_spec.rb b/spec/lib/gitlab/git/storage/health_spec.rb
index 2d3af387971..4a14a5201d1 100644
--- a/spec/lib/gitlab/git/storage/health_spec.rb
+++ b/spec/lib/gitlab/git/storage/health_spec.rb
@@ -20,36 +20,6 @@ describe Gitlab::Git::Storage::Health, clean_gitlab_redis_shared_state: true, br
end
end
- describe '.load_for_keys' do
- let(:subject) do
- results = Gitlab::Git::Storage.redis.with do |redis|
- fake_future = double
- allow(fake_future).to receive(:value).and_return([host1_key])
- described_class.load_for_keys({ 'broken' => fake_future }, redis)
- end
-
- # Make sure the `Redis#future is loaded
- results.inject({}) do |result, (name, info)|
- info.each { |i| i[:failure_count] = i[:failure_count].value.to_i }
-
- result[name] = info
-
- result
- end
- end
-
- it 'loads when there is no info in redis' do
- expect(subject).to eq('broken' => [{ name: host1_key, failure_count: 0 }])
- end
-
- it 'reads the correct values for a storage from redis' do
- set_in_redis(host1_key, 5)
- set_in_redis(host2_key, 7)
-
- expect(subject).to eq('broken' => [{ name: host1_key, failure_count: 5 }])
- end
- end
-
describe '.for_all_storages' do
it 'loads health status for all configured storages' do
healths = described_class.for_all_storages
diff --git a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
index 0e645008c88..5db37f55e03 100644
--- a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
@@ -54,6 +54,10 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do
end
describe '#failure_count_threshold' do
+ before do
+ stub_application_setting(circuitbreaker_failure_count_threshold: 1)
+ end
+
it { expect(breaker.failure_count_threshold).to eq(1) }
end
@@ -61,17 +65,6 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do
ours = described_class.public_instance_methods
theirs = Gitlab::Git::Storage::CircuitBreaker.public_instance_methods
- # These methods are not part of the public API, but are public to allow the
- # CircuitBreaker specs to operate. They should be made private over time.
- exceptions = %i[
- cache_key
- check_storage_accessible!
- no_failures?
- storage_available?
- track_storage_accessible
- track_storage_inaccessible
- ]
-
- expect(theirs - ours).to contain_exactly(*exceptions)
+ expect(theirs - ours).to be_empty
end
end
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index cc10679ef1e..6c4f538bf01 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -29,7 +29,7 @@ describe Gitlab::Git::Tag, seed_helper: true do
it_behaves_like 'Gitlab::Git::Repository#tags'
end
- context 'when Gitaly tags feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly tags feature is disabled', :skip_gitaly_mock do
it_behaves_like 'Gitlab::Git::Repository#tags'
end
end
diff --git a/spec/lib/gitlab/git/user_spec.rb b/spec/lib/gitlab/git/user_spec.rb
index 31d5f59a562..99d850e1df9 100644
--- a/spec/lib/gitlab/git/user_spec.rb
+++ b/spec/lib/gitlab/git/user_spec.rb
@@ -1,18 +1,24 @@
require 'spec_helper'
describe Gitlab::Git::User do
- let(:username) { 'janedo' }
- let(:name) { 'Jane Doe' }
- let(:email) { 'janedoe@example.com' }
+ let(:username) { 'janedoe' }
+ let(:name) { 'Jane Doé' }
+ let(:email) { 'janedoé@example.com' }
let(:gl_id) { 'user-123' }
+ let(:user) do
+ described_class.new(username, name, email, gl_id)
+ end
subject { described_class.new(username, name, email, gl_id) }
describe '.from_gitaly' do
- let(:gitaly_user) { Gitaly::User.new(name: name, email: email, gl_id: gl_id) }
+ let(:gitaly_user) do
+ Gitaly::User.new(gl_username: username, name: name.b, email: email.b, gl_id: gl_id)
+ end
+
subject { described_class.from_gitaly(gitaly_user) }
- it { expect(subject).to eq(described_class.new('', name, email, gl_id)) }
+ it { expect(subject).to eq(user) }
end
describe '.from_gitlab' do
@@ -35,4 +41,21 @@ describe Gitlab::Git::User do
it { expect(subject).not_to eq_other(username, name, email + 'x', gl_id) }
it { expect(subject).not_to eq_other(username, name, email, gl_id + 'x') }
end
+
+ describe '#to_gitaly' do
+ subject { user.to_gitaly }
+
+ it 'creates a Gitaly::User with the correct data' do
+ expect(subject).to be_a(Gitaly::User)
+ expect(subject.gl_username).to eq(username)
+
+ expect(subject.name).to eq(name.b)
+ expect(subject.name).to be_a_binary_string
+
+ expect(subject.email).to eq(email.b)
+ expect(subject.email).to be_a_binary_string
+
+ expect(subject.gl_id).to eq(gl_id)
+ end
+ end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index c54327bd2e4..c9643c5da47 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -165,7 +165,7 @@ describe Gitlab::GitAccess do
stub_application_setting(rsa_key_restriction: 4096)
end
- it 'does not allow keys which are too small', aggregate_failures: true do
+ it 'does not allow keys which are too small', :aggregate_failures do
expect(actor).not_to be_valid
expect { pull_access_check }.to raise_unauthorized('Your SSH key must be at least 4096 bits.')
expect { push_access_check }.to raise_unauthorized('Your SSH key must be at least 4096 bits.')
@@ -177,7 +177,7 @@ describe Gitlab::GitAccess do
stub_application_setting(rsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE)
end
- it 'does not allow keys which are too small', aggregate_failures: true do
+ it 'does not allow keys which are too small', :aggregate_failures do
expect(actor).not_to be_valid
expect { pull_access_check }.to raise_unauthorized(/Your SSH key type is forbidden/)
expect { push_access_check }.to raise_unauthorized(/Your SSH key type is forbidden/)
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 7bd6a7fa842..d9ec28ab02e 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -5,7 +5,7 @@ describe Gitlab::GitalyClient::OperationService do
let(:repository) { project.repository.raw }
let(:client) { described_class.new(repository) }
let(:user) { create(:user) }
- let(:gitaly_user) { Gitlab::GitalyClient::Util.gitaly_user(user) }
+ let(:gitaly_user) { Gitlab::Git::User.from_gitlab(user).to_gitaly }
describe '#user_create_branch' do
let(:branch_name) { 'new' }
@@ -89,4 +89,38 @@ describe Gitlab::GitalyClient::OperationService do
end
end
end
+
+ describe '#user_ff_branch' do
+ let(:target_branch) { 'my-branch' }
+ let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
+ let(:request) do
+ Gitaly::UserFFBranchRequest.new(
+ repository: repository.gitaly_repository,
+ branch: target_branch,
+ commit_id: source_sha,
+ user: gitaly_user
+ )
+ end
+ let(:branch_update) do
+ Gitaly::OperationBranchUpdate.new(
+ commit_id: source_sha,
+ repo_created: false,
+ branch_created: false
+ )
+ end
+ let(:response) { Gitaly::UserFFBranchResponse.new(branch_update: branch_update) }
+
+ subject { client.user_ff_branch(user, source_sha, target_branch) }
+
+ it 'sends a user_ff_branch message and returns a BranchUpdate object' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_ff_branch).with(request, kind_of(Hash))
+ .and_return(response)
+
+ expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate)
+ expect(subject.newrev).to eq(source_sha)
+ expect(subject.repo_created).to be(false)
+ expect(subject.branch_created).to be(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 6f59750b4da..951e146a30a 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -84,14 +84,14 @@ describe Gitlab::GitalyClient::RefService do
end
end
- describe '#find_ref_name', seed_helper: true do
+ describe '#find_ref_name', :seed_helper do
subject { client.find_ref_name(SeedRepo::Commit::ID, 'refs/heads/master') }
it { is_expected.to be_utf8 }
it { is_expected.to eq('refs/heads/master') }
end
- describe '#ref_exists?', seed_helper: true do
+ describe '#ref_exists?', :seed_helper do
it 'finds the master branch ref' do
expect(client.ref_exists?('refs/heads/master')).to eq(true)
end
@@ -104,4 +104,17 @@ describe Gitlab::GitalyClient::RefService do
expect { client.ref_exists?('reXXXXX') }.to raise_error(ArgumentError)
end
end
+
+ describe '#delete_refs' do
+ let(:prefixes) { %w(refs/heads refs/keep-around) }
+
+ it 'sends a delete_refs message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:delete_refs)
+ .with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
+ .and_return(double('delete_refs_response'))
+
+ client.delete_refs(except_with_prefixes: prefixes)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index fd5f984601e..cbc7ce1c1b0 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -73,4 +73,15 @@ describe Gitlab::GitalyClient::RepositoryService do
client.apply_gitattributes(revision)
end
end
+
+ describe '#has_local_branches?' do
+ it 'sends a has_local_branches message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:has_local_branches)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(value: true))
+
+ expect(client.has_local_branches?).to be(true)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/util_spec.rb b/spec/lib/gitlab/gitaly_client/util_spec.rb
index 498f6886bee..d1e0136f8c1 100644
--- a/spec/lib/gitlab/gitaly_client/util_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/util_spec.rb
@@ -6,16 +6,16 @@ describe Gitlab::GitalyClient::Util do
let(:relative_path) { 'my/repo.git' }
let(:gl_repository) { 'project-1' }
let(:git_object_directory) { '.git/objects' }
- let(:git_alternate_object_directory) { '/dir/one:/dir/two' }
+ let(:git_alternate_object_directory) { ['/dir/one', '/dir/two'] }
subject do
described_class.repository(repository_storage, relative_path, gl_repository)
end
it 'creates a Gitaly::Repository with the given data' do
- expect(Gitlab::Git::Env).to receive(:[]).with('GIT_OBJECT_DIRECTORY')
+ allow(Gitlab::Git::Env).to receive(:[]).with('GIT_OBJECT_DIRECTORY_RELATIVE')
.and_return(git_object_directory)
- expect(Gitlab::Git::Env).to receive(:[]).with('GIT_ALTERNATE_OBJECT_DIRECTORIES')
+ allow(Gitlab::Git::Env).to receive(:[]).with('GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE')
.and_return(git_alternate_object_directory)
expect(subject).to be_a(Gitaly::Repository)
@@ -23,21 +23,7 @@ describe Gitlab::GitalyClient::Util do
expect(subject.relative_path).to eq(relative_path)
expect(subject.gl_repository).to eq(gl_repository)
expect(subject.git_object_directory).to eq(git_object_directory)
- expect(subject.git_alternate_object_directories).to eq([git_alternate_object_directory])
- end
- end
-
- describe '.gitaly_user' do
- let(:user) { create(:user) }
- let(:gl_id) { Gitlab::GlId.gl_id(user) }
-
- subject { described_class.gitaly_user(user) }
-
- it 'creates a Gitaly::User from a GitLab user' do
- expect(subject).to be_a(Gitaly::User)
- expect(subject.name).to eq(user.name)
- expect(subject.email).to eq(user.email)
- expect(subject.gl_id).to eq(gl_id)
+ expect(subject.git_alternate_object_directories).to eq(git_alternate_object_directory)
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
new file mode 100644
index 00000000000..6ad9f5ef766
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
@@ -0,0 +1,88 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::WikiService do
+ let(:project) { create(:project) }
+ let(:storage_name) { project.repository_storage }
+ let(:relative_path) { project.disk_path + '.git' }
+ let(:client) { described_class.new(project.repository) }
+ let(:commit) { create(:gitaly_commit) }
+ let(:page_version) { Gitaly::WikiPageVersion.new(format: 'markdown', commit: commit) }
+ let(:page_info) { { title: 'My Page', raw_data: 'a', version: page_version } }
+
+ describe '#find_page' do
+ let(:response) do
+ [
+ Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(page_info)),
+ Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b'))
+ ]
+ end
+ let(:wiki_page) { subject.first }
+ let(:wiki_page_version) { subject.last }
+
+ subject { client.find_page(title: 'My Page', version: 'master', dir: '') }
+
+ it 'sends a wiki_find_page message' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_find_page)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return([].each)
+
+ subject
+ end
+
+ it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_find_page)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(response.each)
+
+ expect(wiki_page.title).to eq('My Page')
+ expect(wiki_page.raw_data).to eq('ab')
+ expect(wiki_page_version.format).to eq('markdown')
+ end
+ end
+
+ describe '#get_all_pages' do
+ let(:page_2_info) { { title: 'My Page 2', raw_data: 'c', version: page_version } }
+ let(:response) do
+ [
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(page_info)),
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b')),
+ Gitaly::WikiGetAllPagesResponse.new(end_of_page: true),
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(page_2_info)),
+ Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(raw_data: 'd')),
+ Gitaly::WikiGetAllPagesResponse.new(end_of_page: true)
+ ]
+ end
+ let(:wiki_page_1) { subject[0].first }
+ let(:wiki_page_1_version) { subject[0].last }
+ let(:wiki_page_2) { subject[1].first }
+ let(:wiki_page_2_version) { subject[1].last }
+
+ subject { client.get_all_pages }
+
+ it 'sends a wiki_get_all_pages message' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_get_all_pages)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return([].each)
+
+ subject
+ end
+
+ it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion for each page' do
+ expect_any_instance_of(Gitaly::WikiService::Stub)
+ .to receive(:wiki_get_all_pages)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(response.each)
+
+ expect(subject.size).to be(2)
+ expect(wiki_page_1.title).to eq('My Page')
+ expect(wiki_page_1.raw_data).to eq('ab')
+ expect(wiki_page_1_version.format).to eq('markdown')
+ expect(wiki_page_2.title).to eq('My Page 2')
+ expect(wiki_page_2.raw_data).to eq('cd')
+ expect(wiki_page_2_version.format).to eq('markdown')
+ end
+ end
+end
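# Gitaly streams wiki pages in chunks: a page's raw_data arrives across several
# messages and `end_of_page` marks the boundary between pages, which is why the
# specs above expect 'a' and 'b' to come back as 'ab'. A rough standalone sketch of
# that reassembly; `Chunk` and `assemble_pages` are illustrative stand-ins, not the
# committed client code.
Chunk = Struct.new(:title, :raw_data, :end_of_page)

def assemble_pages(chunks)
  pages = []
  current = nil

  chunks.each do |chunk|
    if chunk.raw_data
      current ||= { title: chunk.title, raw_data: ''.dup }
      current[:raw_data] << chunk.raw_data
    end

    if chunk.end_of_page
      pages << current
      current = nil
    end
  end

  pages
end

# assemble_pages([Chunk.new('My Page', 'a', false),
#                 Chunk.new(nil, 'b', false),
#                 Chunk.new(nil, nil, true)])
# => [{ title: 'My Page', raw_data: 'ab' }]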
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index a1f4e65b8d4..a871ed0df0e 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -278,4 +278,20 @@ describe Gitlab::GitalyClient, skip_gitaly_mock: true do
end
end
end
+
+ describe 'timeouts' do
+ context 'with default values' do
+ before do
+ stub_application_setting(gitaly_timeout_default: 55)
+ stub_application_setting(gitaly_timeout_medium: 30)
+ stub_application_setting(gitaly_timeout_fast: 10)
+ end
+
+ it 'returns expected values' do
+ expect(described_class.default_timeout).to be(55)
+ expect(described_class.medium_timeout).to be(30)
+ expect(described_class.fast_timeout).to be(10)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
new file mode 100644
index 00000000000..91229d9c7d4
--- /dev/null
+++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
@@ -0,0 +1,62 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::BulkImporting do
+ let(:importer) do
+ Class.new { include(Gitlab::GithubImport::BulkImporting) }.new
+ end
+
+ describe '#build_database_rows' do
+ it 'returns an Array containing the rows to insert' do
+ object = double(:object, title: 'Foo')
+
+ expect(importer)
+ .to receive(:build)
+ .with(object)
+ .and_return({ title: 'Foo' })
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(false)
+
+ enum = [[object, 1]].to_enum
+
+ expect(importer.build_database_rows(enum)).to eq([{ title: 'Foo' }])
+ end
+
+ it 'does not import objects that have already been imported' do
+ object = double(:object, title: 'Foo')
+
+ expect(importer)
+ .not_to receive(:build)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(true)
+
+ enum = [[object, 1]].to_enum
+
+ expect(importer.build_database_rows(enum)).to be_empty
+ end
+ end
+
+ describe '#bulk_insert' do
+ it 'bulk inserts rows into the database' do
+ rows = [{ title: 'Foo' }] * 10
+ model = double(:model, table_name: 'kittens')
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .ordered
+ .with('kittens', rows.first(5))
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .ordered
+ .with('kittens', rows.last(5))
+
+ importer.bulk_insert(model, rows, batch_size: 5)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/caching_spec.rb b/spec/lib/gitlab/github_import/caching_spec.rb
new file mode 100644
index 00000000000..70ecdc16da1
--- /dev/null
+++ b/spec/lib/gitlab/github_import/caching_spec.rb
@@ -0,0 +1,117 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Caching, :clean_gitlab_redis_cache do
+ describe '.read' do
+ it 'reads a value from the cache' do
+ described_class.write('foo', 'bar')
+
+ expect(described_class.read('foo')).to eq('bar')
+ end
+
+ it 'returns nil if the cache key does not exist' do
+ expect(described_class.read('foo')).to be_nil
+ end
+
+ it 'refreshes the cache key if a value is present' do
+ described_class.write('foo', 'bar')
+
+ redis = double(:redis)
+
+ expect(redis).to receive(:get).with(/foo/).and_return('bar')
+ expect(redis).to receive(:expire).with(/foo/, described_class::TIMEOUT)
+ expect(Gitlab::Redis::Cache).to receive(:with).twice.and_yield(redis)
+
+ described_class.read('foo')
+ end
+
+ it 'does not refresh the cache key if a value is empty' do
+ described_class.write('foo', nil)
+
+ redis = double(:redis)
+
+ expect(redis).to receive(:get).with(/foo/).and_return('')
+ expect(redis).not_to receive(:expire)
+ expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
+
+ described_class.read('foo')
+ end
+ end
+
+ describe '.read_integer' do
+ it 'returns an Integer' do
+ described_class.write('foo', '10')
+
+ expect(described_class.read_integer('foo')).to eq(10)
+ end
+
+ it 'returns nil if no value was found' do
+ expect(described_class.read_integer('foo')).to be_nil
+ end
+ end
+
+ describe '.write' do
+ it 'writes a value to the cache and returns the written value' do
+ expect(described_class.write('foo', 10)).to eq(10)
+ expect(described_class.read('foo')).to eq('10')
+ end
+ end
+
+ describe '.set_add' do
+ it 'adds a value to a set' do
+ described_class.set_add('foo', 10)
+ described_class.set_add('foo', 10)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.smembers(key) }
+
+ expect(values).to eq(['10'])
+ end
+ end
+
+ describe '.set_includes?' do
+ it 'returns false when the key does not exist' do
+ expect(described_class.set_includes?('foo', 10)).to eq(false)
+ end
+
+ it 'returns false when the value is not present in the set' do
+ described_class.set_add('foo', 10)
+
+ expect(described_class.set_includes?('foo', 20)).to eq(false)
+ end
+
+ it 'returns true when the set includes the given value' do
+ described_class.set_add('foo', 10)
+
+ expect(described_class.set_includes?('foo', 10)).to eq(true)
+ end
+ end
+
+ describe '.write_multiple' do
+ it 'sets multiple keys' do
+ mapping = { 'foo' => 10, 'bar' => 20 }
+
+ described_class.write_multiple(mapping)
+
+ mapping.each do |key, value|
+ full_key = described_class.cache_key_for(key)
+ found = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
+
+ expect(found).to eq(value.to_s)
+ end
+ end
+ end
+
+ describe '.expire' do
+ it 'sets the expiration time of a key' do
+ timeout = 1.hour.to_i
+
+ described_class.write('foo', 'bar', timeout: 2.hours.to_i)
+ described_class.expire('foo', timeout)
+
+ key = described_class.cache_key_for('foo')
+ found_ttl = Gitlab::Redis::Cache.with { |r| r.ttl(key) }
+
+ expect(found_ttl).to be <= timeout
+ end
+ end
+end
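# A short usage sketch of the caching helper exercised above, limited to the methods
# the spec demonstrates (reads, writes, sets and expiry). The keys are made up;
# values are stored in Redis as strings, as the examples show.
caching = Gitlab::GithubImport::Caching

caching.write('example/label-count', 10)          # stores "10" and returns 10
caching.read('example/label-count')               # => "10"
caching.read_integer('example/label-count')       # => 10

caching.set_add('example/imported-ids', 42)
caching.set_includes?('example/imported-ids', 42) # => true

caching.write_multiple('example/foo' => 10, 'example/bar' => 20)
caching.expire('example/foo', 600)                # shorten the TTL to 10 minutes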
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 66273255b6f..5b2642d9473 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -1,97 +1,392 @@
require 'spec_helper'
describe Gitlab::GithubImport::Client do
- let(:token) { '123456' }
- let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
+ describe '#parallel?' do
+ it 'returns true when the client is running in parallel mode' do
+ client = described_class.new('foo', parallel: true)
- subject(:client) { described_class.new(token) }
+ expect(client).to be_parallel
+ end
+
+ it 'returns false when the client is running in sequential mode' do
+ client = described_class.new('foo', parallel: false)
- before do
- allow(Gitlab.config.omniauth).to receive(:providers).and_return([github_provider])
+ expect(client).not_to be_parallel
+ end
end
- it 'convert OAuth2 client options to symbols' do
- client.client.options.keys.each do |key|
- expect(key).to be_kind_of(Symbol)
+ describe '#user' do
+ it 'returns the details for the given username' do
+ client = described_class.new('foo')
+
+ expect(client.octokit).to receive(:user).with('foo')
+ expect(client).to receive(:with_rate_limit).and_yield
+
+ client.user('foo')
end
end
- it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
- expect { client.api }.not_to raise_error
+ describe '#repository' do
+ it 'returns the details of a repository' do
+ client = described_class.new('foo')
+
+ expect(client.octokit).to receive(:repo).with('foo/bar')
+ expect(client).to receive(:with_rate_limit).and_yield
+
+ client.repository('foo/bar')
+ end
end
- context 'when config is missing' do
- before do
- allow(Gitlab.config.omniauth).to receive(:providers).and_return([])
+ describe '#labels' do
+ it 'returns the labels' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:labels, 'foo/bar')
+
+ client.labels('foo/bar')
end
+ end
- it 'is still possible to get an Octokit client' do
- expect { client.api }.not_to raise_error
+ describe '#milestones' do
+ it 'returns the milestones' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:milestones, 'foo/bar')
+
+ client.milestones('foo/bar')
end
+ end
- it 'is not be possible to get an OAuth2 client' do
- expect { client.client }.to raise_error(Projects::ImportService::Error)
+ describe '#releases' do
+ it 'returns the releases' do
+ client = described_class.new('foo')
+
+ expect(client)
+ .to receive(:each_object)
+ .with(:releases, 'foo/bar')
+
+ client.releases('foo/bar')
end
end
- context 'allow SSL verification to be configurable on API' do
+ describe '#each_page' do
+ let(:client) { described_class.new('foo') }
+ let(:object1) { double(:object1) }
+ let(:object2) { double(:object2) }
+
before do
- github_provider['verify_ssl'] = false
+ allow(client)
+ .to receive(:with_rate_limit)
+ .and_yield
+
+ allow(client.octokit)
+ .to receive(:public_send)
+ .and_return([object1])
+
+ response = double(:response, data: [object2], rels: { next: nil })
+ next_page = double(:next_page, get: response)
+
+ allow(client.octokit)
+ .to receive(:last_response)
+ .and_return(double(:last_response, rels: { next: next_page }))
+ end
+
+ context 'without a block' do
+ it 'returns an Enumerator' do
+ expect(client.each_page(:foo)).to be_an_instance_of(Enumerator)
+ end
+
+      it 'returns Page objects when iterating the Enumerator' do
+ enum = client.each_page(:foo)
+
+ page1 = enum.next
+ page2 = enum.next
+
+ expect(page1).to be_an_instance_of(described_class::Page)
+ expect(page2).to be_an_instance_of(described_class::Page)
+
+ expect(page1.objects).to eq([object1])
+ expect(page1.number).to eq(1)
+
+ expect(page2.objects).to eq([object2])
+ expect(page2.number).to eq(2)
+ end
+ end
+
+ context 'with a block' do
+ it 'yields every retrieved page to the supplied block' do
+ pages = []
+
+ client.each_page(:foo) { |page| pages << page }
+
+ expect(pages[0]).to be_an_instance_of(described_class::Page)
+ expect(pages[1]).to be_an_instance_of(described_class::Page)
+
+ expect(pages[0].objects).to eq([object1])
+ expect(pages[0].number).to eq(1)
+
+ expect(pages[1].objects).to eq([object2])
+ expect(pages[1].number).to eq(2)
+ end
+
+ it 'starts at the given page' do
+ pages = []
+
+ client.each_page(:foo, page: 2) { |page| pages << page }
+
+ expect(pages[0].number).to eq(2)
+ expect(pages[1].number).to eq(3)
+ end
+ end
+ end
+
+ describe '#with_rate_limit' do
+ let(:client) { described_class.new('foo') }
+
+ it 'yields the supplied block when enough requests remain' do
+ expect(client).to receive(:requests_remaining?).and_return(true)
+
+ expect { |b| client.with_rate_limit(&b) }.to yield_control
+ end
+
+ it 'waits before yielding if not enough requests remain' do
+ expect(client).to receive(:requests_remaining?).and_return(false)
+ expect(client).to receive(:raise_or_wait_for_rate_limit)
+
+ expect { |b| client.with_rate_limit(&b) }.to yield_control
+ end
+
+ it 'waits and retries the operation if all requests were consumed in the supplied block' do
+ retries = 0
+
+ expect(client).to receive(:requests_remaining?).and_return(true)
+ expect(client).to receive(:raise_or_wait_for_rate_limit)
+
+ client.with_rate_limit do
+ if retries.zero?
+ retries += 1
+ raise(Octokit::TooManyRequests)
+ end
+ end
+
+ expect(retries).to eq(1)
+ end
+
+ it 'increments the request count counter' do
+ expect(client.request_count_counter)
+ .to receive(:increment)
+ .and_call_original
+
+ expect(client).to receive(:requests_remaining?).and_return(true)
+
+ client.with_rate_limit { }
+ end
+
+ it 'ignores rate limiting when disabled' do
+ expect(client)
+ .to receive(:rate_limiting_enabled?)
+ .and_return(false)
+
+ expect(client)
+ .not_to receive(:requests_remaining?)
+
+ expect(client.with_rate_limit { 10 }).to eq(10)
+ end
+ end
+
+ describe '#requests_remaining?' do
+ let(:client) { described_class.new('foo') }
+
+ it 'returns true if enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(9000)
+
+ expect(client.requests_remaining?).to eq(true)
+ end
+
+ it 'returns false if not enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(1)
+
+ expect(client.requests_remaining?).to eq(false)
+ end
+ end
+
+ describe '#raise_or_wait_for_rate_limit' do
+ it 'raises RateLimitError when running in parallel mode' do
+ client = described_class.new('foo', parallel: true)
+
+ expect { client.raise_or_wait_for_rate_limit }
+ .to raise_error(Gitlab::GithubImport::RateLimitError)
end
- it 'uses supplied value' do
- expect(client.client.options[:connection_opts][:ssl]).to eq({ verify: false })
- expect(client.api.connection_options[:ssl]).to eq({ verify: false })
+ it 'sleeps when running in sequential mode' do
+ client = described_class.new('foo', parallel: false)
+
+ expect(client).to receive(:rate_limit_resets_in).and_return(1)
+ expect(client).to receive(:sleep).with(1)
+
+ client.raise_or_wait_for_rate_limit
+ end
+
+ it 'increments the rate limit counter' do
+ client = described_class.new('foo', parallel: false)
+
+ expect(client)
+ .to receive(:rate_limit_resets_in)
+ .and_return(1)
+
+ expect(client)
+ .to receive(:sleep)
+ .with(1)
+
+ expect(client.rate_limit_counter)
+ .to receive(:increment)
+ .and_call_original
+
+ client.raise_or_wait_for_rate_limit
+ end
+ end
+
+ describe '#remaining_requests' do
+ it 'returns the number of remaining requests' do
+ client = described_class.new('foo')
+ rate_limit = double(remaining: 1)
+
+ expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
+ expect(client.remaining_requests).to eq(1)
+ end
+ end
+
+ describe '#rate_limit_resets_in' do
+ it 'returns the number of seconds after which the rate limit is reset' do
+ client = described_class.new('foo')
+ rate_limit = double(resets_in: 1)
+
+ expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
+
+ expect(client.rate_limit_resets_in).to eq(6)
end
end
describe '#api_endpoint' do
- context 'when provider does not specity an API endpoint' do
- it 'uses GitHub root API endpoint' do
- expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ let(:client) { described_class.new('foo') }
+
+ context 'without a custom endpoint configured in Omniauth' do
+ it 'returns the default API endpoint' do
+ expect(client)
+ .to receive(:custom_api_endpoint)
+ .and_return(nil)
+
+ expect(client.api_endpoint).to eq('https://api.github.com')
end
end
- context 'when provider specify a custom API endpoint' do
- before do
- github_provider['args']['client_options']['site'] = 'https://github.company.com/'
+ context 'with a custom endpoint configured in Omniauth' do
+ it 'returns the custom endpoint' do
+ endpoint = 'https://github.kittens.com'
+
+ expect(client)
+ .to receive(:custom_api_endpoint)
+ .and_return(endpoint)
+
+ expect(client.api_endpoint).to eq(endpoint)
end
+ end
+ end
+
+ describe '#custom_api_endpoint' do
+ let(:client) { described_class.new('foo') }
+
+ context 'without a custom endpoint' do
+ it 'returns nil' do
+ expect(client)
+ .to receive(:github_omniauth_provider)
+ .and_return({})
+
+ expect(client.custom_api_endpoint).to be_nil
+ end
+ end
+
+ context 'with a custom endpoint' do
+ it 'returns the API endpoint' do
+ endpoint = 'https://github.kittens.com'
+
+ expect(client)
+ .to receive(:github_omniauth_provider)
+ .and_return({ 'args' => { 'client_options' => { 'site' => endpoint } } })
- it 'uses the custom API endpoint' do
- expect(OmniAuth::Strategies::GitHub).not_to receive(:default_options)
- expect(client.api.api_endpoint).to eq 'https://github.company.com/'
+ expect(client.custom_api_endpoint).to eq(endpoint)
end
end
+ end
+
+ describe '#default_api_endpoint' do
+ it 'returns the default API endpoint' do
+ client = described_class.new('foo')
+
+ expect(client.default_api_endpoint).to eq('https://api.github.com')
+ end
+ end
+
+ describe '#verify_ssl' do
+ let(:client) { described_class.new('foo') }
- context 'when given a host' do
- subject(:client) { described_class.new(token, host: 'https://try.gitea.io/') }
+ context 'without a custom configuration' do
+ it 'returns true' do
+ expect(client)
+ .to receive(:github_omniauth_provider)
+ .and_return({})
- it 'builds a endpoint with the given host and the default API version' do
- expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ expect(client.verify_ssl).to eq(true)
end
end
- context 'when given an API version' do
- subject(:client) { described_class.new(token, api_version: 'v3') }
+ context 'with a custom configuration' do
+ it 'returns the configured value' do
+ expect(client.verify_ssl).to eq(false)
+ end
+ end
+ end
+
+ describe '#github_omniauth_provider' do
+ let(:client) { described_class.new('foo') }
- it 'does not use the API version without a host' do
- expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ context 'without a configured provider' do
+ it 'returns an empty Hash' do
+ expect(Gitlab.config.omniauth)
+ .to receive(:providers)
+ .and_return([])
+
+ expect(client.github_omniauth_provider).to eq({})
end
end
- context 'when given a host and version' do
- subject(:client) { described_class.new(token, host: 'https://try.gitea.io/', api_version: 'v3') }
+ context 'with a configured provider' do
+ it 'returns the provider details as a Hash' do
+ hash = client.github_omniauth_provider
- it 'builds a endpoint with the given options' do
- expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ expect(hash['name']).to eq('github')
+ expect(hash['url']).to eq('https://github.com/')
end
end
end
- it 'does not raise error when rate limit is disabled' do
- stub_request(:get, /api.github.com/)
- allow(client.api).to receive(:rate_limit!).and_raise(Octokit::NotFound)
+ describe '#rate_limiting_enabled?' do
+ let(:client) { described_class.new('foo') }
- expect { client.issues {} }.not_to raise_error
+ it 'returns true when using GitHub.com' do
+ expect(client.rate_limiting_enabled?).to eq(true)
+ end
+
+ it 'returns false for GitHub enterprise installations' do
+ expect(client)
+ .to receive(:api_endpoint)
+ .and_return('https://github.kittens.com/')
+
+ expect(client.rate_limiting_enabled?).to eq(false)
+ end
end
end
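# A brief usage sketch of the reworked GithubImport client, restricted to behaviour
# the spec above demonstrates. The token and repository name are placeholders, and
# the extra arguments passed to #each_page are an assumption based on how #labels
# and friends delegate to #each_object.
client = Gitlab::GithubImport::Client.new('personal-access-token', parallel: false)

client.user('alice')          # user details, fetched inside #with_rate_limit
client.repository('foo/bar')  # repository details
client.labels('foo/bar')      # delegates to #each_object(:labels, 'foo/bar')
client.milestones('foo/bar')
client.releases('foo/bar')

# Lower-level pagination: every yielded Page exposes #objects and #number, and
# pagination can start at an arbitrary page.
client.each_page(:issues, 'foo/bar', page: 2) do |page|
  page.objects.each { |issue| puts issue.title }
end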
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
new file mode 100644
index 00000000000..1568c657a1e
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -0,0 +1,152 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::DiffNoteImporter do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:user) { create(:user) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:hunk) do
+ '@@ -1 +1 @@
+ -Hello
+ +Hello world'
+ end
+
+ let(:note) do
+ Gitlab::GithubImport::Representation::DiffNote.new(
+ noteable_type: 'MergeRequest',
+ noteable_id: 1,
+ commit_id: '123abc',
+ file_path: 'README.md',
+ diff_hunk: hunk,
+ author: Gitlab::GithubImport::Representation::User
+ .new(id: user.id, login: user.username),
+ note: 'Hello',
+ created_at: created_at,
+ updated_at: updated_at,
+ github_id: 1
+ )
+ end
+
+ let(:importer) { described_class.new(note, project, client) }
+
+ describe '#execute' do
+ context 'when the merge request no longer exists' do
+ it 'does not import anything' do
+ expect(Gitlab::Database).not_to receive(:bulk_insert)
+
+ importer.execute
+ end
+ end
+
+ context 'when the merge request exists' do
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ before do
+ allow(importer)
+ .to receive(:find_merge_request_id)
+ .and_return(merge_request.id)
+ end
+
+ it 'imports the note' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([user.id, true])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ LegacyDiffNote.table_name,
+ [
+ {
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id,
+ project_id: project.id,
+ author_id: user.id,
+ note: 'Hello',
+ system: false,
+ commit_id: '123abc',
+ line_code: note.line_code,
+ type: 'LegacyDiffNote',
+ created_at: created_at,
+ updated_at: updated_at,
+ st_diff: note.diff_hash.to_yaml
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+
+ it 'imports the note when the author could not be found' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ LegacyDiffNote.table_name,
+ [
+ {
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id,
+ project_id: project.id,
+ author_id: project.creator_id,
+ note: "*Created by: #{user.username}*\n\nHello",
+ system: false,
+ commit_id: '123abc',
+ line_code: note.line_code,
+ type: 'LegacyDiffNote',
+ created_at: created_at,
+ updated_at: updated_at,
+ st_diff: note.diff_hash.to_yaml
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+
+ it 'produces a valid LegacyDiffNote' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([user.id, true])
+
+ importer.execute
+
+ note = project.notes.diff_notes.take
+
+ expect(note).to be_valid
+ expect(note.diff).to be_an_instance_of(Gitlab::Git::Diff)
+ end
+
+ it 'does not import the note when a foreign key error is raised' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.execute }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#find_merge_request_id' do
+ it 'returns a merge request ID' do
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:database_id)
+ .and_return(10)
+
+ expect(importer.find_merge_request_id).to eq(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
new file mode 100644
index 00000000000..4713c6795bb
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -0,0 +1,119 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::DiffNotesImporter do
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ let(:github_comment) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/pull/42',
+ path: 'README.md',
+ commit_id: '123abc',
+ diff_hunk: "@@ -1 +1 @@\n-Hello\n+Hello world",
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ id: 1
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports diff notes in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports diff notes in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each diff note in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ diff_note_importer = double(:diff_note_importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::Importer::DiffNoteImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::DiffNote),
+ project,
+ client
+ )
+ .and_return(diff_note_importer)
+
+ expect(diff_note_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each diff note in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::ImportDiffNoteWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the ID of the given note' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(github_comment))
+ .to eq(1)
+ end
+ end
+
+ describe '#collection_options' do
+ it 'returns an empty Hash' do
+ # For large projects (e.g. kubernetes/kubernetes) GitHub's API may produce
+ # HTTP 500 errors when using explicit sorting options, regardless of what
+ # order you sort in. Not using any sorting options at all allows us to
+ # work around this.
+ importer = described_class.new(project, client)
+
+ expect(importer.collection_options).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
new file mode 100644
index 00000000000..665b31ef244
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issue_and_label_links_importer_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter do
+ describe '#execute' do
+ it 'imports an issue and its labels' do
+ issue = double(:issue)
+ project = double(:project)
+ client = double(:client)
+ label_links_instance = double(:label_links_importer)
+ importer = described_class.new(issue, project, client)
+
+ expect(Gitlab::GithubImport::Importer::IssueImporter)
+ .to receive(:import_if_issue)
+ .with(issue, project, client)
+
+ expect(Gitlab::GithubImport::Importer::LabelLinksImporter)
+ .to receive(:new)
+ .with(issue, project, client)
+ .and_return(label_links_instance)
+
+ expect(label_links_instance)
+ .to receive(:execute)
+
+ importer.execute
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
new file mode 100644
index 00000000000..d34ca0b76b8
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -0,0 +1,201 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:user) { create(:user) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:issue) do
+ Gitlab::GithubImport::Representation::Issue.new(
+ iid: 42,
+ title: 'My Issue',
+ description: 'This is my issue',
+ milestone_number: 1,
+ state: :opened,
+ assignees: [
+ Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
+ Gitlab::GithubImport::Representation::User.new(id: 5, login: 'bob')
+ ],
+ label_names: %w[bug],
+ author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ pull_request: false
+ )
+ end
+
+ describe '.import_if_issue' do
+ it 'imports an issuable if it is a regular issue' do
+ importer = double(:importer)
+
+ expect(described_class)
+ .to receive(:new)
+ .with(issue, project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+
+ described_class.import_if_issue(issue, project, client)
+ end
+
+ it 'does not import the issuable if it is a pull request' do
+ expect(issue).to receive(:pull_request?).and_return(true)
+
+ expect(described_class).not_to receive(:new)
+
+ described_class.import_if_issue(issue, project, client)
+ end
+ end
+
+ describe '#execute' do
+ let(:importer) { described_class.new(issue, project, client) }
+
+ it 'creates the issue and assignees' do
+ expect(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+
+ expect(importer)
+ .to receive(:create_assignees)
+ .with(10)
+
+ expect(importer.issuable_finder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ importer.execute
+ end
+ end
+
+ describe '#create_issue' do
+ let(:importer) { described_class.new(issue, project, client) }
+
+ before do
+ allow(importer.milestone_finder)
+ .to receive(:id_for)
+ .with(issue)
+ .and_return(milestone.id)
+ end
+
+ context 'when the issue author could be found' do
+ it 'creates the issue with the found author as the issue author' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Issue',
+ author_id: user.id,
+ project_id: project.id,
+ description: 'This is my issue',
+ milestone_id: milestone.id,
+ state: :opened,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.issues
+ )
+ .and_call_original
+
+ importer.create_issue
+ end
+ end
+
+ context 'when the issue author could not be found' do
+ it 'creates the issue with the project creator as the issue author' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Issue',
+ author_id: project.creator_id,
+ project_id: project.id,
+ description: "*Created by: alice*\n\nThis is my issue",
+ milestone_id: milestone.id,
+ state: :opened,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.issues
+ )
+ .and_call_original
+
+ importer.create_issue
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
+ it 'does not raise any errors' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.create_issue }.not_to raise_error
+ end
+ end
+
+ it 'produces a valid Issue' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ importer.create_issue
+
+ expect(project.issues.take).to be_valid
+ end
+
+ it 'returns the ID of the created issue' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ expect(importer.create_issue).to be_a_kind_of(Numeric)
+ end
+ end
+
+ describe '#create_assignees' do
+ it 'inserts the issue assignees in bulk' do
+ importer = described_class.new(issue, project, client)
+
+ allow(importer.user_finder)
+ .to receive(:user_id_for)
+ .ordered.with(issue.assignees[0])
+ .and_return(4)
+
+ allow(importer.user_finder)
+ .to receive(:user_id_for)
+ .ordered.with(issue.assignees[1])
+ .and_return(5)
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ IssueAssignee.table_name,
+ [{ issue_id: 1, user_id: 4 }, { issue_id: 1, user_id: 5 }]
+ )
+
+ importer.create_assignees(1)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
new file mode 100644
index 00000000000..e237e79e94b
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -0,0 +1,111 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::IssuesImporter do
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:github_issue) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Issue',
+ body: 'This is my issue',
+ milestone: double(:milestone, number: 4),
+ state: 'open',
+ assignees: [double(:user, id: 4, login: 'alice')],
+ labels: [double(:label, name: 'bug')],
+ user: double(:user, id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ pull_request: false
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports issues in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports issues in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each issue in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ issue_importer = double(:importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_issue)
+
+ expect(Gitlab::GithubImport::Importer::IssueAndLabelLinksImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::Issue),
+ project,
+ client
+ )
+ .and_return(issue_importer)
+
+ expect(issue_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each issue in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_issue)
+
+ expect(Gitlab::GithubImport::ImportIssueWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the issue number of the given issue' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(github_issue))
+ .to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
new file mode 100644
index 00000000000..e2a71e78574
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
@@ -0,0 +1,82 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::LabelLinksImporter do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:issue) do
+ double(
+ :issue,
+ iid: 4,
+ label_names: %w[bug],
+ issuable_type: Issue,
+ pull_request?: false
+ )
+ end
+
+ let(:importer) { described_class.new(issue, project, client) }
+
+ describe '#execute' do
+ it 'creates the label links' do
+ importer = described_class.new(issue, project, client)
+
+ expect(importer).to receive(:create_labels)
+
+ importer.execute
+ end
+ end
+
+ describe '#create_labels' do
+ it 'inserts the label links in bulk' do
+ expect(importer.label_finder)
+ .to receive(:id_for)
+ .with('bug')
+ .and_return(2)
+
+ expect(importer)
+ .to receive(:find_target_id)
+ .and_return(1)
+
+ Timecop.freeze do
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ LabelLink.table_name,
+ [
+ {
+ label_id: 2,
+ target_id: 1,
+ target_type: Issue,
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now
+ }
+ ]
+ )
+
+ importer.create_labels
+ end
+ end
+
+ it 'does not insert label links for non-existing labels' do
+ expect(importer.label_finder)
+ .to receive(:id_for)
+ .with('bug')
+ .and_return(nil)
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(LabelLink.table_name, [])
+
+ importer.create_labels
+ end
+ end
+
+ describe '#find_target_id' do
+ it 'returns the ID of the issuable to create the label link for' do
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:database_id)
+ .and_return(10)
+
+ expect(importer.find_target_id).to eq(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
new file mode 100644
index 00000000000..156ef96a0fa
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -0,0 +1,107 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+ let(:importer) { described_class.new(project, client) }
+
+ describe '#execute' do
+ it 'imports the labels in bulk' do
+ label_hash = { title: 'bug', color: '#fffaaa' }
+
+ expect(importer)
+ .to receive(:build_labels)
+ .and_return([label_hash])
+
+ expect(importer)
+ .to receive(:bulk_insert)
+ .with(Label, [label_hash])
+
+ expect(importer)
+ .to receive(:build_labels_cache)
+
+ importer.execute
+ end
+ end
+
+ describe '#build_labels' do
+ it 'returns an Array containing label rows' do
+ label = double(:label, name: 'bug', color: 'ffffff')
+
+ expect(importer).to receive(:each_label).and_return([label])
+
+ rows = importer.build_labels
+
+ expect(rows.length).to eq(1)
+ expect(rows[0][:title]).to eq('bug')
+ end
+
+ it 'does not create labels that already exist' do
+ create(:label, project: project, title: 'bug')
+
+ label = double(:label, name: 'bug', color: 'ffffff')
+
+ expect(importer).to receive(:each_label).and_return([label])
+ expect(importer.build_labels).to be_empty
+ end
+ end
+
+ describe '#build_labels_cache' do
+ it 'builds the labels cache' do
+ expect_any_instance_of(Gitlab::GithubImport::LabelFinder)
+ .to receive(:build_cache)
+
+ importer.build_labels_cache
+ end
+ end
+
+ describe '#build' do
+ let(:label_hash) do
+ importer.build(double(:label, name: 'bug', color: 'ffffff'))
+ end
+
+ it 'returns the attributes of the label as a Hash' do
+ expect(label_hash).to be_an_instance_of(Hash)
+ end
+
+ context 'the returned Hash' do
+ it 'includes the label title' do
+ expect(label_hash[:title]).to eq('bug')
+ end
+
+ it 'includes the label color' do
+ expect(label_hash[:color]).to eq('#ffffff')
+ end
+
+ it 'includes the project ID' do
+ expect(label_hash[:project_id]).to eq(project.id)
+ end
+
+ it 'includes the label type' do
+ expect(label_hash[:type]).to eq('ProjectLabel')
+ end
+
+ it 'includes the created timestamp' do
+ Timecop.freeze do
+ expect(label_hash[:created_at]).to eq(Time.zone.now)
+ end
+ end
+
+ it 'includes the updated timestamp' do
+ Timecop.freeze do
+ expect(label_hash[:updated_at]).to eq(Time.zone.now)
+ end
+ end
+ end
+ end
+
+ describe '#each_label' do
+ it 'returns the labels' do
+ expect(client)
+ .to receive(:labels)
+ .with('foo/bar')
+
+ importer.each_label
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
new file mode 100644
index 00000000000..b1cac3b6e46
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -0,0 +1,120 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+ let(:importer) { described_class.new(project, client) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:milestone) do
+ double(
+ :milestone,
+ number: 1,
+ title: '1.0',
+ description: 'The first release',
+ state: 'open',
+ created_at: created_at,
+ updated_at: updated_at
+ )
+ end
+
+ describe '#execute' do
+ it 'imports the milestones in bulk' do
+ milestone_hash = { number: 1, title: '1.0' }
+
+ expect(importer)
+ .to receive(:build_milestones)
+ .and_return([milestone_hash])
+
+ expect(importer)
+ .to receive(:bulk_insert)
+ .with(Milestone, [milestone_hash])
+
+ expect(importer)
+ .to receive(:build_milestones_cache)
+
+ importer.execute
+ end
+ end
+
+ describe '#build_milestones' do
+ it 'returns an Array containing milestone rows' do
+ expect(importer)
+ .to receive(:each_milestone)
+ .and_return([milestone])
+
+ rows = importer.build_milestones
+
+ expect(rows.length).to eq(1)
+ expect(rows[0][:title]).to eq('1.0')
+ end
+
+ it 'does not create milestones that already exist' do
+ create(:milestone, project: project, title: '1.0', iid: 1)
+
+ expect(importer)
+ .to receive(:each_milestone)
+ .and_return([milestone])
+
+ expect(importer.build_milestones).to be_empty
+ end
+ end
+
+ describe '#build_milestones_cache' do
+ it 'builds the milestones cache' do
+ expect_any_instance_of(Gitlab::GithubImport::MilestoneFinder)
+ .to receive(:build_cache)
+
+ importer.build_milestones_cache
+ end
+ end
+
+ describe '#build' do
+ let(:milestone_hash) { importer.build(milestone) }
+
+ it 'returns the attributes of the milestone as a Hash' do
+ expect(milestone_hash).to be_an_instance_of(Hash)
+ end
+
+ context 'the returned Hash' do
+ it 'includes the milestone number' do
+ expect(milestone_hash[:iid]).to eq(1)
+ end
+
+ it 'includes the milestone title' do
+ expect(milestone_hash[:title]).to eq('1.0')
+ end
+
+ it 'includes the milestone description' do
+ expect(milestone_hash[:description]).to eq('The first release')
+ end
+
+ it 'includes the project ID' do
+ expect(milestone_hash[:project_id]).to eq(project.id)
+ end
+
+ it 'includes the milestone state' do
+ expect(milestone_hash[:state]).to eq(:active)
+ end
+
+ it 'includes the created timestamp' do
+ expect(milestone_hash[:created_at]).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(milestone_hash[:updated_at]).to eq(updated_at)
+ end
+ end
+ end
+
+ describe '#each_milestone' do
+ it 'returns the milestones' do
+ expect(client)
+ .to receive(:milestones)
+ .with('foo/bar', state: 'all')
+
+ importer.each_milestone
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
new file mode 100644
index 00000000000..9bdcc42be19
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -0,0 +1,151 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::NoteImporter do
+ let(:client) { double(:client) }
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:github_note) do
+ Gitlab::GithubImport::Representation::Note.new(
+ noteable_id: 1,
+ noteable_type: 'Issue',
+ author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
+ note: 'This is my note',
+ created_at: created_at,
+ updated_at: updated_at,
+ github_id: 1
+ )
+ end
+
+ let(:importer) { described_class.new(github_note, project, client) }
+
+ describe '#execute' do
+ context 'when the noteable exists' do
+ let!(:issue_row) { create(:issue, project: project, iid: 1) }
+
+ before do
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(issue_row.id)
+ end
+
+ context 'when the author could be found' do
+ it 'imports the note with the found author as the note author' do
+ expect(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ Note.table_name,
+ [
+ {
+ noteable_type: 'Issue',
+ noteable_id: issue_row.id,
+ project_id: project.id,
+ author_id: user.id,
+ note: 'This is my note',
+ system: false,
+ created_at: created_at,
+ updated_at: updated_at
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+ end
+
+ context 'when the note author could not be found' do
+ it 'imports the note with the project creator as the note author' do
+ expect(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([project.creator_id, false])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(
+ Note.table_name,
+ [
+ {
+ noteable_type: 'Issue',
+ noteable_id: issue_row.id,
+ project_id: project.id,
+ author_id: project.creator_id,
+ note: "*Created by: alice*\n\nThis is my note",
+ system: false,
+ created_at: created_at,
+ updated_at: updated_at
+ }
+ ]
+ )
+ .and_call_original
+
+ importer.execute
+ end
+ end
+ end
+
+ context 'when the noteable does not exist' do
+ it 'does not import the note' do
+ expect(Gitlab::Database).not_to receive(:bulk_insert)
+
+ importer.execute
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
+ it 'does not raise any errors' do
+ issue_row = create(:issue, project: project, iid: 1)
+
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(issue_row.id)
+
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.execute }.not_to raise_error
+ end
+ end
+
+ it 'produces a valid Note' do
+ issue_row = create(:issue, project: project, iid: 1)
+
+ allow(importer)
+ .to receive(:find_noteable_id)
+ .and_return(issue_row.id)
+
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ importer.execute
+
+ expect(project.notes.take).to be_valid
+ end
+ end
+
+ describe '#find_noteable_id' do
+ it 'returns the ID of the noteable' do
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:database_id)
+ .and_return(10)
+
+ expect(importer.find_noteable_id).to eq(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
new file mode 100644
index 00000000000..f046d13f879
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -0,0 +1,116 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::NotesImporter do
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ let(:github_comment) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/issues/42',
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ id: 1
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports notes in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports notes in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each note in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ note_importer = double(:note_importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::Importer::NoteImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::Note),
+ project,
+ client
+ )
+ .and_return(note_importer)
+
+ expect(note_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each note in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(github_comment)
+
+ expect(Gitlab::GithubImport::ImportNoteWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the ID of the given note' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(github_comment))
+ .to eq(1)
+ end
+ end
+
+ describe '#collection_options' do
+ it 'returns an empty Hash' do
+ # For large projects (e.g. kubernetes/kubernetes) GitHub's API may produce
+ # HTTP 500 errors when using explicit sorting options, regardless of what
+ # order you sort in. Not using any sorting options at all allows us to
+ # work around this.
+ importer = described_class.new(project, client)
+
+ expect(importer.collection_options).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
new file mode 100644
index 00000000000..35f3fdf8304
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -0,0 +1,221 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
+ let(:client) { double(:client) }
+ let(:user) { create(:user) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+ let(:merged_at) { Time.new(2017, 1, 1, 12, 17) }
+
+ let(:source_commit) { project.repository.commit('feature') }
+ let(:target_commit) { project.repository.commit('master') }
+ let(:milestone) { create(:milestone, project: project) }
+
+ let(:pull_request) do
+ alice = Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice')
+
+ Gitlab::GithubImport::Representation::PullRequest.new(
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_branch: 'feature',
+ source_branch_sha: source_commit.id,
+ target_branch: 'master',
+ target_branch_sha: target_commit.id,
+ source_repository_id: 400,
+ target_repository_id: 200,
+ source_repository_owner: 'alice',
+ state: :closed,
+ milestone_number: milestone.iid,
+ author: alice,
+ assignee: alice,
+ created_at: created_at,
+ updated_at: updated_at,
+ merged_at: merged_at
+ )
+ end
+
+ let(:importer) { described_class.new(pull_request, project, client) }
+
+ describe '#execute' do
+ it 'imports the pull request' do
+ expect(importer)
+ .to receive(:create_merge_request)
+ .and_return(10)
+
+ expect_any_instance_of(Gitlab::GithubImport::IssuableFinder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ importer.execute
+ end
+ end
+
+ describe '#create_merge_request' do
+ before do
+ allow(importer.milestone_finder)
+ .to receive(:id_for)
+ .with(pull_request)
+ .and_return(milestone.id)
+ end
+
+ context 'when the author could be found' do
+ before do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+ end
+
+ it 'imports the pull request with the pull request author as the merge request author' do
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'alice:feature',
+ target_branch: 'master',
+ state: :merged,
+ milestone_id: milestone.id,
+ author_id: user.id,
+ assignee_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+
+ it 'returns the ID of the created merge request' do
+ id = importer.create_merge_request
+
+ expect(id).to be_a_kind_of(Numeric)
+ end
+
+ it 'creates the merge request diffs' do
+ importer.create_merge_request
+
+ mr = project.merge_requests.take
+
+ expect(mr.merge_request_diffs.exists?).to eq(true)
+ end
+ end
+
+ context 'when the author could not be found' do
+ it 'imports the pull request with the project creator as the merge request author' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([project.creator_id, false])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: "*Created by: alice*\n\nThis is my pull request",
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'alice:feature',
+ target_branch: 'master',
+ state: :merged,
+ milestone_id: milestone.id,
+ author_id: project.creator_id,
+ assignee_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+ end
+
+ context 'when the source and target branches are identical' do
+ it 'uses a generated source branch name for the merge request' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+
+ allow(pull_request)
+ .to receive(:source_repository_id)
+ .and_return(pull_request.target_repository_id)
+
+ allow(pull_request)
+ .to receive(:source_branch)
+ .and_return('master')
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .with(
+ {
+ iid: 42,
+ title: 'My Pull Request',
+ description: 'This is my pull request',
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: 'master-42',
+ target_branch: 'master',
+ state: :merged,
+ milestone_id: milestone.id,
+ author_id: user.id,
+ assignee_id: user.id,
+ created_at: created_at,
+ updated_at: updated_at
+ },
+ project.merge_requests
+ )
+ .and_call_original
+
+ importer.create_merge_request
+ end
+ end
+
+ context 'when the import fails due to a foreign key error' do
+ it 'does not raise any errors' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(pull_request)
+ .and_return([user.id, true])
+
+ allow(importer.user_finder)
+ .to receive(:assignee_id_for)
+ .with(pull_request)
+ .and_return(user.id)
+
+ expect(Gitlab::GithubImport)
+ .to receive(:insert_and_return_id)
+ .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
+
+ expect { importer.create_merge_request }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
new file mode 100644
index 00000000000..d72572cd510
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -0,0 +1,272 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::PullRequestsImporter do
+ let(:project) { create(:project, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ let(:pull_request) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Pull Request',
+ body: 'This is my pull request',
+ state: 'closed',
+ head: double(
+ :head,
+ sha: '123abc',
+ ref: 'my-feature',
+ repo: double(:repo, id: 400),
+ user: double(:user, id: 4, login: 'alice')
+ ),
+ base: double(
+ :base,
+ sha: '456def',
+ ref: 'master',
+ repo: double(:repo, id: 200)
+ ),
+ milestone: double(:milestone, number: 4),
+ user: double(:user, id: 4, login: 'alice'),
+ assignee: double(:user, id: 4, login: 'alice'),
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now,
+ merged_at: Time.zone.now
+ )
+ end
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ importer = described_class.new(project, client)
+ expect(importer).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = described_class.new(project, client, parallel: false)
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports pull requests in parallel' do
+ importer = described_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ it 'imports pull requests in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ it 'imports each pull request in sequence' do
+ importer = described_class.new(project, client, parallel: false)
+ pull_request_importer = double(:pull_request_importer)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(pull_request)
+
+ expect(Gitlab::GithubImport::Importer::PullRequestImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::PullRequest),
+ project,
+ client
+ )
+ .and_return(pull_request_importer)
+
+ expect(pull_request_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each pull request in parallel' do
+ importer = described_class.new(project, client)
+
+ allow(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(pull_request)
+
+ expect(Gitlab::GithubImport::ImportPullRequestWorker)
+ .to receive(:perform_async)
+ .with(project.id, an_instance_of(Hash), an_instance_of(String))
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:importer) { described_class.new(project, client) }
+
+ before do
+ page = double(:page, objects: [pull_request], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(
+ :pull_requests,
+ 'foo/bar',
+ { state: 'all', sort: 'created', direction: 'asc', page: 1 }
+ )
+ .and_yield(page)
+ end
+
+ it 'yields every pull request to the supplied block' do
+ expect { |b| importer.each_object_to_import(&b) }
+ .to yield_with_args(pull_request)
+ end
+
+ it 'updates the repository if a pull request was updated after the last clone' do
+ expect(importer)
+ .to receive(:update_repository?)
+ .with(pull_request)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_repository)
+
+ importer.each_object_to_import { }
+ end
+ end
+
+ describe '#update_repository' do
+ it 'updates the repository' do
+ importer = described_class.new(project, client)
+
+ expect(project.repository)
+ .to receive(:fetch_remote)
+ .with('github', forced: false)
+
+ expect(Rails.logger)
+ .to receive(:info)
+ .with(an_instance_of(String))
+
+ expect(importer.repository_updates_counter)
+ .to receive(:increment)
+ .with(project: project.path_with_namespace)
+ .and_call_original
+
+ Timecop.freeze do
+ importer.update_repository
+
+ expect(project.last_repository_updated_at).to eq(Time.zone.now)
+ end
+ end
+ end
+
+ describe '#update_repository?' do
+ let(:importer) { described_class.new(project, client) }
+
+ context 'when the pull request was updated after the last update' do
+ let(:pr) do
+ double(
+ :pr,
+ updated_at: Time.zone.now,
+ head: double(:head, sha: '123'),
+ base: double(:base, sha: '456')
+ )
+ end
+
+ before do
+ allow(project)
+ .to receive(:last_repository_updated_at)
+ .and_return(1.year.ago)
+ end
+
+ it 'returns true when the head SHA is not present' do
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.head.sha)
+ .and_return(false)
+
+ expect(importer.update_repository?(pr)).to eq(true)
+ end
+
+ it 'returns true when the base SHA is not present' do
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.head.sha)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.base.sha)
+ .and_return(false)
+
+ expect(importer.update_repository?(pr)).to eq(true)
+ end
+
+ it 'returns false if both the head and base SHAs are present' do
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.head.sha)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:commit_exists?)
+ .with(pr.base.sha)
+ .and_return(true)
+
+ expect(importer.update_repository?(pr)).to eq(false)
+ end
+ end
+
+ context 'when the pull request was updated before the last update' do
+ it 'returns false' do
+ pr = double(:pr, updated_at: 1.year.ago)
+
+ allow(project)
+ .to receive(:last_repository_updated_at)
+ .and_return(Time.zone.now)
+
+ expect(importer.update_repository?(pr)).to eq(false)
+ end
+ end
+ end
+
+ describe '#commit_exists?' do
+ let(:importer) { described_class.new(project, client) }
+
+ it 'returns true when a commit exists' do
+ expect(project.repository)
+ .to receive(:lookup)
+ .with('123')
+ .and_return(double(:commit))
+
+ expect(importer.commit_exists?('123')).to eq(true)
+ end
+
+ it 'returns false when a commit does not exist' do
+ expect(project.repository)
+ .to receive(:lookup)
+ .with('123')
+ .and_raise(Rugged::OdbError)
+
+ expect(importer.commit_exists?('123')).to eq(false)
+ end
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the PR number of the given PR' do
+ importer = described_class.new(project, client)
+
+ expect(importer.id_for_already_imported_cache(pull_request))
+ .to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
new file mode 100644
index 00000000000..23ae026fb14
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -0,0 +1,125 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::ReleasesImporter do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:importer) { described_class.new(project, client) }
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ let(:release) do
+ double(
+ :release,
+ tag_name: '1.0',
+ body: 'This is my release',
+ created_at: created_at,
+ updated_at: updated_at
+ )
+ end
+
+ describe '#execute' do
+ it 'imports the releases in bulk' do
+ release_hash = {
+ tag_name: '1.0',
+ description: 'This is my release',
+ created_at: created_at,
+ updated_at: updated_at
+ }
+
+ expect(importer).to receive(:build_releases).and_return([release_hash])
+ expect(importer).to receive(:bulk_insert).with(Release, [release_hash])
+
+ importer.execute
+ end
+ end
+
+ describe '#build_releases' do
+ it 'returns an Array containing release rows' do
+ expect(importer).to receive(:each_release).and_return([release])
+
+ rows = importer.build_releases
+
+ expect(rows.length).to eq(1)
+ expect(rows[0][:tag]).to eq('1.0')
+ end
+
+ it 'does not create releases that already exist' do
+ create(:release, project: project, tag: '1.0', description: '1.0')
+
+ expect(importer).to receive(:each_release).and_return([release])
+ expect(importer.build_releases).to be_empty
+ end
+
+ it 'uses a default release description if none is provided' do
+ expect(release).to receive(:body).and_return('')
+ expect(importer).to receive(:each_release).and_return([release])
+
+ release = importer.build_releases.first
+
+ expect(release[:description]).to eq('Release for tag 1.0')
+ end
+ end
+
+ describe '#build' do
+ let(:release_hash) { importer.build(release) }
+
+ it 'returns the attributes of the release as a Hash' do
+ expect(release_hash).to be_an_instance_of(Hash)
+ end
+
+ context 'the returned Hash' do
+ it 'includes the tag name' do
+ expect(release_hash[:tag]).to eq('1.0')
+ end
+
+ it 'includes the release description' do
+ expect(release_hash[:description]).to eq('This is my release')
+ end
+
+ it 'includes the project ID' do
+ expect(release_hash[:project_id]).to eq(project.id)
+ end
+
+ it 'includes the created timestamp' do
+ expect(release_hash[:created_at]).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(release_hash[:updated_at]).to eq(updated_at)
+ end
+ end
+ end
+
+ describe '#each_release' do
+ let(:release) { double(:release) }
+
+ before do
+ allow(project).to receive(:import_source).and_return('foo/bar')
+
+ allow(client)
+ .to receive(:releases)
+ .with('foo/bar')
+ .and_return([release].to_enum)
+ end
+
+ it 'returns an Enumerator' do
+ expect(importer.each_release).to be_an_instance_of(Enumerator)
+ end
+
+ it 'yields every release to the Enumerator' do
+ expect(importer.each_release.next).to eq(release)
+ end
+ end
+
+ describe '#description_for' do
+ it 'returns the description when present' do
+ expect(importer.description_for(release)).to eq(release.body)
+ end
+
+ it 'returns a generated description when one is not present' do
+ allow(release).to receive(:body).and_return('')
+
+ expect(importer.description_for(release)).to eq('Release for tag 1.0')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
new file mode 100644
index 00000000000..168e5d07504
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -0,0 +1,227 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Importer::RepositoryImporter do
+ let(:repository) { double(:repository) }
+ let(:client) { double(:client) }
+
+ let(:project) do
+ double(
+ :project,
+ import_url: 'foo.git',
+ import_source: 'foo/bar',
+ repository_storage_path: 'foo',
+ disk_path: 'foo',
+ repository: repository
+ )
+ end
+
+ let(:importer) { described_class.new(project, client) }
+ let(:shell_adapter) { Gitlab::Shell.new }
+
+ before do
+ # The method "gitlab_shell" returns a new instance every call, making
+ # it harder to set expectations. To work around this we'll stub the method
+ # and return the same instance on every call.
+ allow(importer).to receive(:gitlab_shell).and_return(shell_adapter)
+ end
+
+ describe '#import_wiki?' do
+ it 'returns true if the wiki should be imported' do
+ repo = double(:repo, has_wiki: true)
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(project)
+ .to receive(:wiki_repository_exists?)
+ .and_return(false)
+
+ expect(importer.import_wiki?).to eq(true)
+ end
+
+ it 'returns false if the GitHub wiki is disabled' do
+ repo = double(:repo, has_wiki: false)
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(importer.import_wiki?).to eq(false)
+ end
+
+ it 'returns false if the wiki has already been imported' do
+ repo = double(:repo, has_wiki: true)
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(project)
+ .to receive(:wiki_repository_exists?)
+ .and_return(true)
+
+ expect(importer.import_wiki?).to eq(false)
+ end
+ end
+
+ describe '#execute' do
+ it 'imports the repository and wiki' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_clone_time)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'does not import the repository if it already exists' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(true)
+
+ expect(importer)
+ .not_to receive(:import_repository)
+
+ expect(importer)
+ .to receive(:import_wiki_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_clone_time)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'does not import the wiki if it is disabled' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:import_repository)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:update_clone_time)
+
+ expect(importer)
+ .not_to receive(:import_wiki_repository)
+
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'does not import the wiki if the repository could not be imported' do
+ expect(repository)
+ .to receive(:empty_repo?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_wiki?)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:import_repository)
+ .and_return(false)
+
+ expect(importer)
+ .not_to receive(:update_clone_time)
+
+ expect(importer)
+ .not_to receive(:import_wiki_repository)
+
+ expect(importer.execute).to eq(false)
+ end
+ end
+
+ describe '#import_repository' do
+ it 'imports the repository' do
+ expect(project)
+ .to receive(:ensure_repository)
+
+ expect(repository)
+ .to receive(:fetch_as_mirror)
+ .with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true, remote_name: 'github')
+
+ expect(importer.import_repository).to eq(true)
+ end
+
+ it 'marks the import as failed when an error was raised' do
+ expect(project).to receive(:ensure_repository)
+ .and_raise(Gitlab::Git::Repository::NoRepository)
+
+ expect(importer)
+ .to receive(:fail_import)
+ .and_return(false)
+
+ expect(importer.import_repository).to eq(false)
+ end
+ end
+
+ describe '#import_wiki_repository' do
+ it 'imports the wiki repository' do
+ expect(importer.gitlab_shell)
+ .to receive(:import_repository)
+ .with('foo', 'foo.wiki', 'foo.wiki.git')
+
+ expect(importer.import_wiki_repository).to eq(true)
+ end
+
+ it 'marks the import as failed if an error was raised' do
+ expect(importer.gitlab_shell)
+ .to receive(:import_repository)
+ .and_raise(Gitlab::Shell::Error)
+
+ expect(importer)
+ .to receive(:fail_import)
+ .and_return(false)
+
+ expect(importer.import_wiki_repository).to eq(false)
+ end
+ end
+
+ describe '#fail_import' do
+ it 'marks the import as failed' do
+ expect(project).to receive(:mark_import_as_failed).with('foo')
+
+ expect(importer.fail_import('foo')).to eq(false)
+ end
+ end
+
+ describe '#update_clone_time' do
+ it 'sets the timestamp for when the cloning process finished' do
+ Timecop.freeze do
+ expect(project)
+ .to receive(:update_column)
+ .with(:last_repository_updated_at, Time.zone.now)
+
+ importer.update_clone_time
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
new file mode 100644
index 00000000000..da69911812a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
+ let(:project) { double(:project, id: 4) }
+ let(:issue) do
+ double(:issue, issuable_type: MergeRequest, iid: 1)
+ end
+
+ let(:finder) { described_class.new(project, issue) }
+
+ describe '#database_id' do
+ it 'returns nil when no cache is in place' do
+ expect(finder.database_id).to be_nil
+ end
+
+ it 'returns the ID of an issuable when the cache is in place' do
+ finder.cache_database_id(10)
+
+ expect(finder.database_id).to eq(10)
+ end
+
+ it 'raises TypeError when the object is not supported' do
+ finder = described_class.new(project, double(:issue))
+
+ expect { finder.database_id }.to raise_error(TypeError)
+ end
+ end
+
+ describe '#cache_database_id' do
+ it 'caches the ID of a database row' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with('github-import/issuable-finder/4/MergeRequest/1', 10)
+
+ finder.cache_database_id(10)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
new file mode 100644
index 00000000000..8ba766944d6
--- /dev/null
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache do
+ let(:project) { create(:project) }
+ let(:finder) { described_class.new(project) }
+ let!(:bug) { create(:label, project: project, name: 'Bug') }
+ let!(:feature) { create(:label, project: project, name: 'Feature') }
+
+ describe '#id_for' do
+ context 'with a cache in place' do
+ before do
+ finder.build_cache
+ end
+
+ it 'returns the ID of the given label' do
+ expect(finder.id_for(feature.name)).to eq(feature.id)
+ end
+
+ it 'returns nil for an empty cache key' do
+ key = finder.cache_key_for(bug.name)
+
+ Gitlab::GithubImport::Caching.write(key, '')
+
+ expect(finder.id_for(bug.name)).to be_nil
+ end
+
+ it 'returns nil for a non-existing label name' do
+ expect(finder.id_for('kittens')).to be_nil
+ end
+ end
+
+ context 'without a cache in place' do
+ it 'returns nil for a label' do
+ expect(finder.id_for(feature.name)).to be_nil
+ end
+ end
+ end
+
+ describe '#build_cache' do
+ it 'builds the cache of all project labels' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write_multiple)
+ .with(
+ {
+ "github-import/label-finder/#{project.id}/Bug" => bug.id,
+ "github-import/label-finder/#{project.id}/Feature" => feature.id
+ }
+ )
+ .and_call_original
+
+ finder.build_cache
+ end
+ end
+
+ describe '#cache_key_for' do
+ it 'returns the cache key for a label name' do
+ expect(finder.cache_key_for('foo'))
+ .to eq("github-import/label-finder/#{project.id}/foo")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
new file mode 100644
index 00000000000..1ff5b9d66b3
--- /dev/null
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -0,0 +1,28 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::MarkdownText do
+ describe '.format' do
+ it 'formats the text' do
+ author = double(:author, login: 'Alice')
+ text = described_class.format('Hello', author)
+
+ expect(text).to eq("*Created by: Alice*\n\nHello")
+ end
+ end
+
+ describe '#to_s' do
+ it 'returns the text when the author was found' do
+ author = double(:author, login: 'Alice')
+ text = described_class.new('Hello', author, true)
+
+ expect(text.to_s).to eq('Hello')
+ end
+
+ it 'returns the text with an extra header when the author was not found' do
+ author = double(:author, login: 'Alice')
+ text = described_class.new('Hello', author)
+
+ expect(text.to_s).to eq("*Created by: Alice*\n\nHello")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
new file mode 100644
index 00000000000..dff931a2fe8
--- /dev/null
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -0,0 +1,57 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
+ let!(:project) { create(:project) }
+ let!(:milestone) { create(:milestone, project: project) }
+ let(:finder) { described_class.new(project) }
+
+ describe '#id_for' do
+ let(:issuable) { double(:issuable, milestone_number: milestone.iid) }
+
+ context 'with a cache in place' do
+ before do
+ finder.build_cache
+ end
+
+ it 'returns the milestone ID of the given issuable' do
+ expect(finder.id_for(issuable)).to eq(milestone.id)
+ end
+
+ it 'returns nil for an empty cache key' do
+ key = finder.cache_key_for(milestone.iid)
+
+ Gitlab::GithubImport::Caching.write(key, '')
+
+ expect(finder.id_for(issuable)).to be_nil
+ end
+
+ it 'returns nil for an issuable with a non-existing milestone' do
+ expect(finder.id_for(double(:issuable, milestone_number: 5))).to be_nil
+ end
+ end
+
+ context 'without a cache in place' do
+ it 'returns nil' do
+ expect(finder.id_for(issuable)).to be_nil
+ end
+ end
+ end
+
+ describe '#build_cache' do
+ it 'builds the cache of all project milestones' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write_multiple)
+ .with("github-import/milestone-finder/#{project.id}/1" => milestone.id)
+ .and_call_original
+
+ finder.build_cache
+ end
+ end
+
+ describe '#cache_key_for' do
+ it 'returns the cache key for an IID' do
+ expect(finder.cache_key_for(10))
+ .to eq("github-import/milestone-finder/#{project.id}/10")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/github_import/page_counter_spec.rb
new file mode 100644
index 00000000000..c2613a9a415
--- /dev/null
+++ b/spec/lib/gitlab/github_import/page_counter_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache do
+ let(:project) { double(:project, id: 1) }
+ let(:counter) { described_class.new(project, :issues) }
+
+ describe '#initialize' do
+ it 'sets the initial page number to 1 when no value is cached' do
+ expect(counter.current).to eq(1)
+ end
+
+ it 'sets the initial page number to the cached value when one is present' do
+ Gitlab::GithubImport::Caching.write(counter.cache_key, 2)
+
+ expect(described_class.new(project, :issues).current).to eq(2)
+ end
+ end
+
+ describe '#set' do
+ it 'overwrites the page number when the given number is greater than the current number' do
+ counter.set(4)
+ expect(counter.current).to eq(4)
+ end
+
+ it 'does not overwrite the page number when the given number is lower than the current number' do
+ counter.set(2)
+ counter.set(1)
+
+ expect(counter.current).to eq(2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/parallel_importer_spec.rb b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
new file mode 100644
index 00000000000..e2a821d4d5c
--- /dev/null
+++ b/spec/lib/gitlab/github_import/parallel_importer_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ParallelImporter do
+ describe '.async?' do
+ it 'returns true' do
+ expect(described_class).to be_async
+ end
+ end
+
+ describe '#execute', :clean_gitlab_redis_shared_state do
+ let(:project) { create(:project) }
+ let(:importer) { described_class.new(project) }
+
+ before do
+ expect(Gitlab::GithubImport::Stage::ImportRepositoryWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+ .and_return('123')
+ end
+
+ it 'schedules the importing of the repository' do
+ expect(importer.execute).to eq(true)
+ end
+
+ it 'sets the JID in Redis' do
+ expect(Gitlab::SidekiqStatus)
+ .to receive(:set)
+ .with("github-importer/#{project.id}", StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
+ .and_call_original
+
+ importer.execute
+ end
+
+ it 'updates the import JID of the project' do
+ importer.execute
+
+ expect(project.import_jid).to eq("github-importer/#{project.id}")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
new file mode 100644
index 00000000000..98205d3ee25
--- /dev/null
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -0,0 +1,296 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::ParallelScheduling do
+ let(:importer_class) do
+ Class.new do
+ include(Gitlab::GithubImport::ParallelScheduling)
+
+ def collection_method
+ :issues
+ end
+ end
+ end
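+
+ # The anonymous class above is a minimal host for the ParallelScheduling
+ # concern; these examples only assume that #collection_method names the
+ # GitHub API collection (:issues) from which pages of objects are fetched.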
+
+ let(:project) { double(:project, id: 4, import_source: 'foo/bar') }
+ let(:client) { double(:client) }
+
+ describe '#parallel?' do
+ it 'returns true when running in parallel mode' do
+ expect(importer_class.new(project, client)).to be_parallel
+ end
+
+ it 'returns false when running in sequential mode' do
+ importer = importer_class.new(project, client, parallel: false)
+
+ expect(importer).not_to be_parallel
+ end
+ end
+
+ describe '#execute' do
+ it 'imports data in parallel when running in parallel mode' do
+ importer = importer_class.new(project, client)
+ waiter = double(:waiter)
+
+ expect(importer)
+ .to receive(:parallel_import)
+ .and_return(waiter)
+
+ expect(importer.execute)
+ .to eq(waiter)
+ end
+
+ it 'imports data in sequence when running in sequential mode' do
+ importer = importer_class.new(project, client, parallel: false)
+
+ expect(importer)
+ .to receive(:sequential_import)
+ .and_return([])
+
+ expect(importer.execute)
+ .to eq([])
+ end
+
+ it 'expires the cache used for tracking already imported objects' do
+ importer = importer_class.new(project, client)
+
+ expect(importer).to receive(:parallel_import)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:expire)
+ .with(importer.already_imported_cache_key, a_kind_of(Numeric))
+
+ importer.execute
+ end
+ end
+
+ describe '#sequential_import' do
+ let(:importer) { importer_class.new(project, client, parallel: false) }
+
+ it 'imports data in sequence' do
+ repr_class = double(:representation_class)
+ repr_instance = double(:representation_instance)
+ gh_importer = double(:github_importer)
+ gh_importer_instance = double(:github_importer_instance)
+ object = double(:object)
+
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(importer)
+ .to receive(:representation_class)
+ .and_return(repr_class)
+
+ expect(repr_class)
+ .to receive(:from_api_response)
+ .with(object)
+ .and_return(repr_instance)
+
+ expect(importer)
+ .to receive(:importer_class)
+ .and_return(gh_importer)
+
+ expect(gh_importer)
+ .to receive(:new)
+ .with(repr_instance, project, client)
+ .and_return(gh_importer_instance)
+
+ expect(gh_importer_instance)
+ .to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ let(:importer) { importer_class.new(project, client) }
+
+ it 'imports data in parallel' do
+ repr_class = double(:representation)
+ worker_class = double(:worker)
+ object = double(:object)
+
+ expect(importer)
+ .to receive(:each_object_to_import)
+ .and_yield(object)
+
+ expect(importer)
+ .to receive(:representation_class)
+ .and_return(repr_class)
+
+ expect(importer)
+ .to receive(:sidekiq_worker_class)
+ .and_return(worker_class)
+
+ expect(repr_class)
+ .to receive(:from_api_response)
+ .with(object)
+ .and_return({ title: 'Foo' })
+
+ expect(worker_class)
+ .to receive(:perform_async)
+ .with(project.id, { title: 'Foo' }, an_instance_of(String))
+
+ expect(importer.parallel_import)
+ .to be_an_instance_of(Gitlab::JobWaiter)
+ end
+ end
+
+ describe '#each_object_to_import' do
+ let(:importer) { importer_class.new(project, client) }
+ let(:object) { double(:object) }
+
+ before do
+ expect(importer)
+ .to receive(:collection_options)
+ .and_return({ state: 'all' })
+ end
+
+ it 'yields every object to import' do
+ page = double(:page, objects: [object], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 1 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(1)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:mark_as_imported)
+ .with(object)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .to yield_with_args(object)
+ end
+
+ it 'resumes from the last page' do
+ page = double(:page, objects: [object], number: 2)
+
+ expect(importer.page_counter)
+ .to receive(:current)
+ .and_return(2)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 2 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(2)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(false)
+
+ expect(importer)
+ .to receive(:mark_as_imported)
+ .with(object)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .to yield_with_args(object)
+ end
+
+ it 'does not yield any objects if the page number was not set' do
+ page = double(:page, objects: [object], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 1 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(1)
+ .and_return(false)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .not_to yield_control
+ end
+
+ it 'does not yield the object if it was already imported' do
+ page = double(:page, objects: [object], number: 1)
+
+ expect(client)
+ .to receive(:each_page)
+ .with(:issues, 'foo/bar', { state: 'all', page: 1 })
+ .and_yield(page)
+
+ expect(importer.page_counter)
+ .to receive(:set)
+ .with(1)
+ .and_return(true)
+
+ expect(importer)
+ .to receive(:already_imported?)
+ .with(object)
+ .and_return(true)
+
+ expect(importer)
+ .not_to receive(:mark_as_imported)
+
+ expect { |b| importer.each_object_to_import(&b) }
+ .not_to yield_control
+ end
+ end
+
+ describe '#already_imported?', :clean_gitlab_redis_cache do
+ let(:importer) { importer_class.new(project, client) }
+
+ it 'returns false when an object has not yet been imported' do
+ object = double(:object, id: 10)
+
+ expect(importer)
+ .to receive(:id_for_already_imported_cache)
+ .with(object)
+ .and_return(object.id)
+
+ expect(importer.already_imported?(object))
+ .to eq(false)
+ end
+
+ it 'returns true when an object has already been imported' do
+ object = double(:object, id: 10)
+
+ allow(importer)
+ .to receive(:id_for_already_imported_cache)
+ .with(object)
+ .and_return(object.id)
+
+ importer.mark_as_imported(object)
+
+ expect(importer.already_imported?(object))
+ .to eq(true)
+ end
+ end
+
+ describe '#mark_as_imported', :clean_gitlab_redis_cache do
+ it 'marks an object as already imported' do
+ object = double(:object, id: 10)
+ importer = importer_class.new(project, client)
+
+ expect(importer)
+ .to receive(:id_for_already_imported_cache)
+ .with(object)
+ .and_return(object.id)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:set_add)
+ .with(importer.already_imported_cache_key, object.id)
+ .and_call_original
+
+ importer.mark_as_imported(object)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
new file mode 100644
index 00000000000..7b0a1ea4948
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -0,0 +1,164 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::DiffNote do
+ let(:hunk) do
+ '@@ -1 +1 @@
+ -Hello
+ +Hello world'
+ end
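+
+ # The hunk above mimics the diff_hunk payload GitHub returns for review
+ # comments; the #line_code and #diff_hash examples below derive their
+ # values from it.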
+
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ shared_examples 'a DiffNote' do
+ it 'returns an instance of DiffNote' do
+ expect(note).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned DiffNote' do
+ it 'includes the number of the note' do
+ expect(note.noteable_id).to eq(42)
+ end
+
+ it 'includes the file path of the diff' do
+ expect(note.file_path).to eq('README.md')
+ end
+
+ it 'includes the commit ID' do
+ expect(note.commit_id).to eq('123abc')
+ end
+
+ it 'includes the user details' do
+ expect(note.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(note.author.id).to eq(4)
+ expect(note.author.login).to eq('alice')
+ end
+
+ it 'includes the note body' do
+ expect(note.note).to eq('Hello world')
+ end
+
+ it 'includes the created timestamp' do
+ expect(note.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(note.updated_at).to eq(updated_at)
+ end
+
+ it 'includes the GitHub ID' do
+ expect(note.github_id).to eq(1)
+ end
+
+ it 'returns the noteable type' do
+ expect(note.noteable_type).to eq('MergeRequest')
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/pull/42',
+ path: 'README.md',
+ commit_id: '123abc',
+ diff_hunk: hunk,
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: created_at,
+ updated_at: updated_at,
+ id: 1
+ )
+ end
+
+ it_behaves_like 'a DiffNote' do
+ let(:note) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ note = described_class.from_api_response(response)
+
+ expect(note.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a DiffNote' do
+ let(:hash) do
+ {
+ 'noteable_type' => 'MergeRequest',
+ 'noteable_id' => 42,
+ 'file_path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => hunk,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+ end
+
+ let(:note) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'noteable_type' => 'MergeRequest',
+ 'noteable_id' => 42,
+ 'file_path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => hunk,
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+
+ note = described_class.from_json_hash(hash)
+
+ expect(note.author).to be_nil
+ end
+ end
+
+ describe '#line_code' do
+ it 'returns a String' do
+ note = described_class.new(diff_hunk: hunk, file_path: 'README.md')
+
+ expect(note.line_code).to be_an_instance_of(String)
+ end
+ end
+
+ describe '#diff_hash' do
+ it 'returns a Hash containing the diff details' do
+ note = described_class.from_json_hash(
+ 'noteable_type' => 'MergeRequest',
+ 'noteable_id' => 42,
+ 'file_path' => 'README.md',
+ 'commit_id' => '123abc',
+ 'diff_hunk' => hunk,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ )
+
+ expect(note.diff_hash).to eq(
+ diff: hunk,
+ new_path: 'README.md',
+ old_path: 'README.md',
+ a_mode: '100644',
+ b_mode: '100644',
+ new_file: false
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb b/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
new file mode 100644
index 00000000000..15de0fe49ff
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/expose_attribute_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::ExposeAttribute do
+ it 'defines a getter method that returns an attribute value' do
+ klass = Class.new do
+ include Gitlab::GithubImport::Representation::ExposeAttribute
+
+ expose_attribute :number
+
+ attr_reader :attributes
+
+ def initialize
+ @attributes = { number: 42 }
+ end
+ end
+
+ expect(klass.new.number).to eq(42)
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb
new file mode 100644
index 00000000000..99330ce42cb
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb
@@ -0,0 +1,182 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::Issue do
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ shared_examples 'an Issue' do
+ it 'returns an instance of Issue' do
+ expect(issue).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned Issue' do
+ it 'includes the issue number' do
+ expect(issue.iid).to eq(42)
+ end
+
+ it 'includes the issue title' do
+ expect(issue.title).to eq('My Issue')
+ end
+
+ it 'includes the issue description' do
+ expect(issue.description).to eq('This is my issue')
+ end
+
+ it 'includes the milestone number' do
+ expect(issue.milestone_number).to eq(4)
+ end
+
+ it 'includes the issue state' do
+ expect(issue.state).to eq(:opened)
+ end
+
+ it 'includes the issue assignees' do
+ expect(issue.assignees[0])
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(issue.assignees[0].id).to eq(4)
+ expect(issue.assignees[0].login).to eq('alice')
+ end
+
+ it 'includes the label names' do
+ expect(issue.label_names).to eq(%w[bug])
+ end
+
+ it 'includes the author details' do
+ expect(issue.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(issue.author.id).to eq(4)
+ expect(issue.author.login).to eq('alice')
+ end
+
+ it 'includes the created timestamp' do
+ expect(issue.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(issue.updated_at).to eq(updated_at)
+ end
+
+ it 'is not a pull request' do
+ expect(issue.pull_request?).to eq(false)
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Issue',
+ body: 'This is my issue',
+ milestone: double(:milestone, number: 4),
+ state: 'open',
+ assignees: [double(:user, id: 4, login: 'alice')],
+ labels: [double(:label, name: 'bug')],
+ user: double(:user, id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ pull_request: false
+ )
+ end
+
+ it_behaves_like 'an Issue' do
+ let(:issue) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ issue = described_class.from_api_response(response)
+
+ expect(issue.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'an Issue' do
+ let(:hash) do
+ {
+ 'iid' => 42,
+ 'title' => 'My Issue',
+ 'description' => 'This is my issue',
+ 'milestone_number' => 4,
+ 'state' => 'opened',
+ 'assignees' => [{ 'id' => 4, 'login' => 'alice' }],
+ 'label_names' => %w[bug],
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'pull_request' => false
+ }
+ end
+
+ let(:issue) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'iid' => 42,
+ 'title' => 'My Issue',
+ 'description' => 'This is my issue',
+ 'milestone_number' => 4,
+ 'state' => 'opened',
+ 'assignees' => [{ 'id' => 4, 'login' => 'alice' }],
+ 'label_names' => %w[bug],
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'pull_request' => false
+ }
+
+ issue = described_class.from_json_hash(hash)
+
+ expect(issue.author).to be_nil
+ end
+ end
+
+ describe '#labels?' do
+ it 'returns true when the issue has labels assigned' do
+ issue = described_class.new(label_names: %w[bug])
+
+ expect(issue.labels?).to eq(true)
+ end
+
+ it 'returns false when the issue has no labels assigned' do
+ issue = described_class.new(label_names: [])
+
+ expect(issue.labels?).to eq(false)
+ end
+ end
+
+ describe '#pull_request?' do
+ it 'returns false for an issue' do
+ issue = described_class.new(pull_request: false)
+
+ expect(issue.pull_request?).to eq(false)
+ end
+
+ it 'returns true for a pull request' do
+ issue = described_class.new(pull_request: true)
+
+ expect(issue.pull_request?).to eq(true)
+ end
+ end
+
+ describe '#truncated_title' do
+ it 'truncates the title to 255 characters' do
+ object = described_class.new(title: 'm' * 300)
+
+ expect(object.truncated_title.length).to eq(255)
+ end
+
+ it 'does not truncate the title if it is shorter than 255 characters' do
+ object = described_class.new(title: 'foo')
+
+ expect(object.truncated_title).to eq('foo')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/note_spec.rb b/spec/lib/gitlab/github_import/representation/note_spec.rb
new file mode 100644
index 00000000000..f2c1c66b357
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/note_spec.rb
@@ -0,0 +1,107 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::Note do
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+
+ shared_examples 'a Note' do
+ it 'returns an instance of Note' do
+ expect(note).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned Note' do
+ it 'includes the noteable ID' do
+ expect(note.noteable_id).to eq(42)
+ end
+
+ it 'includes the noteable type' do
+ expect(note.noteable_type).to eq('Issue')
+ end
+
+ it 'includes the author details' do
+ expect(note.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(note.author.id).to eq(4)
+ expect(note.author.login).to eq('alice')
+ end
+
+ it 'includes the note body' do
+ expect(note.note).to eq('Hello world')
+ end
+
+ it 'includes the created timestamp' do
+ expect(note.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(note.updated_at).to eq(updated_at)
+ end
+
+ it 'includes the GitHub ID' do
+ expect(note.github_id).to eq(1)
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ html_url: 'https://github.com/foo/bar/issues/42',
+ user: double(:user, id: 4, login: 'alice'),
+ body: 'Hello world',
+ created_at: created_at,
+ updated_at: updated_at,
+ id: 1
+ )
+ end
+
+ it_behaves_like 'a Note' do
+ let(:note) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ note = described_class.from_api_response(response)
+
+ expect(note.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a Note' do
+ let(:hash) do
+ {
+ 'noteable_id' => 42,
+ 'noteable_type' => 'Issue',
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+ end
+
+ let(:note) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'noteable_id' => 42,
+ 'noteable_type' => 'Issue',
+ 'note' => 'Hello world',
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'github_id' => 1
+ }
+
+ note = described_class.from_json_hash(hash)
+
+ expect(note.author).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
new file mode 100644
index 00000000000..33f6ff0ae6a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
@@ -0,0 +1,288 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::PullRequest do
+ let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+ let(:merged_at) { Time.new(2017, 1, 1, 12, 17) }
+
+ shared_examples 'a PullRequest' do
+ it 'returns an instance of PullRequest' do
+ expect(pr).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned PullRequest' do
+ it 'includes the pull request number' do
+ expect(pr.iid).to eq(42)
+ end
+
+ it 'includes the pull request title' do
+ expect(pr.title).to eq('My Pull Request')
+ end
+
+ it 'includes the pull request description' do
+ expect(pr.description).to eq('This is my pull request')
+ end
+
+ it 'includes the source branch name' do
+ expect(pr.source_branch).to eq('my-feature')
+ end
+
+ it 'includes the source branch SHA' do
+ expect(pr.source_branch_sha).to eq('123abc')
+ end
+
+ it 'includes the target branch name' do
+ expect(pr.target_branch).to eq('master')
+ end
+
+ it 'includes the target branch SHA' do
+ expect(pr.target_branch_sha).to eq('456def')
+ end
+
+ it 'includes the milestone number' do
+ expect(pr.milestone_number).to eq(4)
+ end
+
+ it 'includes the user details' do
+ expect(pr.author)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(pr.author.id).to eq(4)
+ expect(pr.author.login).to eq('alice')
+ end
+
+ it 'includes the assignee details' do
+ expect(pr.assignee)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(pr.assignee.id).to eq(4)
+ expect(pr.assignee.login).to eq('alice')
+ end
+
+ it 'includes the created timestamp' do
+ expect(pr.created_at).to eq(created_at)
+ end
+
+ it 'includes the updated timestamp' do
+ expect(pr.updated_at).to eq(updated_at)
+ end
+
+ it 'includes the merged timestamp' do
+ expect(pr.merged_at).to eq(merged_at)
+ end
+
+ it 'includes the source repository ID' do
+ expect(pr.source_repository_id).to eq(400)
+ end
+
+ it 'includes the target repository ID' do
+ expect(pr.target_repository_id).to eq(200)
+ end
+
+ it 'includes the source repository owner name' do
+ expect(pr.source_repository_owner).to eq('alice')
+ end
+
+ it 'includes the pull request state' do
+ expect(pr.state).to eq(:merged)
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ let(:response) do
+ double(
+ :response,
+ number: 42,
+ title: 'My Pull Request',
+ body: 'This is my pull request',
+ state: 'closed',
+ head: double(
+ :head,
+ sha: '123abc',
+ ref: 'my-feature',
+ repo: double(:repo, id: 400),
+ user: double(:user, id: 4, login: 'alice')
+ ),
+ base: double(
+ :base,
+ sha: '456def',
+ ref: 'master',
+ repo: double(:repo, id: 200)
+ ),
+ milestone: double(:milestone, number: 4),
+ user: double(:user, id: 4, login: 'alice'),
+ assignee: double(:user, id: 4, login: 'alice'),
+ created_at: created_at,
+ updated_at: updated_at,
+ merged_at: merged_at
+ )
+ end
+
+ it_behaves_like 'a PullRequest' do
+ let(:pr) { described_class.from_api_response(response) }
+ end
+
+ it 'does not set the user if the response did not include a user' do
+ allow(response)
+ .to receive(:user)
+ .and_return(nil)
+
+ pr = described_class.from_api_response(response)
+
+ expect(pr.author).to be_nil
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a PullRequest' do
+ let(:hash) do
+ {
+ 'iid' => 42,
+ 'title' => 'My Pull Request',
+ 'description' => 'This is my pull request',
+ 'source_branch' => 'my-feature',
+ 'source_branch_sha' => '123abc',
+ 'target_branch' => 'master',
+ 'target_branch_sha' => '456def',
+ 'source_repository_id' => 400,
+ 'target_repository_id' => 200,
+ 'source_repository_owner' => 'alice',
+ 'state' => 'closed',
+ 'milestone_number' => 4,
+ 'author' => { 'id' => 4, 'login' => 'alice' },
+ 'assignee' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'merged_at' => merged_at.to_s
+ }
+ end
+
+ let(:pr) { described_class.from_json_hash(hash) }
+ end
+
+ it 'does not convert the author if it was not specified' do
+ hash = {
+ 'iid' => 42,
+ 'title' => 'My Pull Request',
+ 'description' => 'This is my pull request',
+ 'source_branch' => 'my-feature',
+ 'source_branch_sha' => '123abc',
+ 'target_branch' => 'master',
+ 'target_branch_sha' => '456def',
+ 'source_repository_id' => 400,
+ 'target_repository_id' => 200,
+ 'source_repository_owner' => 'alice',
+ 'state' => 'closed',
+ 'milestone_number' => 4,
+ 'assignee' => { 'id' => 4, 'login' => 'alice' },
+ 'created_at' => created_at.to_s,
+ 'updated_at' => updated_at.to_s,
+ 'merged_at' => merged_at.to_s
+ }
+
+ pr = described_class.from_json_hash(hash)
+
+ expect(pr.author).to be_nil
+ end
+ end
+
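+ # As the examples below show, the state is derived rather than copied from
+ # GitHub verbatim: a closed pull request with a merged_at timestamp is
+ # reported as :merged.
+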
+ describe '#state' do
+ it 'returns :opened for an open pull request' do
+ pr = described_class.new(state: :opened)
+
+ expect(pr.state).to eq(:opened)
+ end
+
+ it 'returns :closed for a closed pull request' do
+ pr = described_class.new(state: :closed)
+
+ expect(pr.state).to eq(:closed)
+ end
+
+ it 'returns :merged for a merged pull request' do
+ pr = described_class.new(state: :closed, merged_at: merged_at)
+
+ expect(pr.state).to eq(:merged)
+ end
+ end
+
+ describe '#cross_project?' do
+ it 'returns false for a pull request submitted from the target project' do
+ pr = described_class.new(source_repository_id: 1, target_repository_id: 1)
+
+ expect(pr).not_to be_cross_project
+ end
+
+ it 'returns true for a pull request submitted from a different project' do
+ pr = described_class.new(source_repository_id: 1, target_repository_id: 2)
+
+ expect(pr).to be_cross_project
+ end
+
+ it 'returns true if no source repository is present' do
+ pr = described_class.new(target_repository_id: 2)
+
+ expect(pr).to be_cross_project
+ end
+ end
+
+ describe '#formatted_source_branch' do
+ context 'for a cross-project pull request' do
+ it 'includes the owner name in the branch name' do
+ pr = described_class.new(
+ source_repository_owner: 'foo',
+ source_branch: 'branch',
+ target_branch: 'master',
+ source_repository_id: 1,
+ target_repository_id: 2
+ )
+
+ expect(pr.formatted_source_branch).to eq('foo:branch')
+ end
+ end
+
+ context 'for a regular pull request' do
+ it 'returns the source branch name' do
+ pr = described_class.new(
+ source_repository_owner: 'foo',
+ source_branch: 'branch',
+ target_branch: 'master',
+ source_repository_id: 1,
+ target_repository_id: 1
+ )
+
+ expect(pr.formatted_source_branch).to eq('branch')
+ end
+ end
+
+ context 'for a pull request with the same source and target branches' do
+ it 'returns a generated source branch name' do
+ pr = described_class.new(
+ iid: 1,
+ source_repository_owner: 'foo',
+ source_branch: 'branch',
+ target_branch: 'branch',
+ source_repository_id: 1,
+ target_repository_id: 1
+ )
+
+ expect(pr.formatted_source_branch).to eq('branch-1')
+ end
+ end
+ end
+
+ describe '#truncated_title' do
+ it 'truncates the title to 255 characters' do
+ object = described_class.new(title: 'm' * 300)
+
+ expect(object.truncated_title.length).to eq(255)
+ end
+
+ it 'does not truncate the title if it is shorter than 255 characters' do
+ object = described_class.new(title: 'foo')
+
+ expect(object.truncated_title).to eq('foo')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/to_hash_spec.rb b/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
new file mode 100644
index 00000000000..c296aa0a45b
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/to_hash_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::ToHash do
+ describe '#to_hash' do
+ let(:user) { double(:user, attributes: { login: 'alice' }) }
+
+ let(:issue) do
+ double(
+ :issue,
+ attributes: { user: user, assignees: [user], number: 42 }
+ )
+ end
+
+ let(:issue_hash) { issue.to_hash }
+
+ before do
+ user.extend(described_class)
+ issue.extend(described_class)
+ end
+
+ it 'converts an object to a Hash' do
+ expect(issue_hash).to be_an_instance_of(Hash)
+ end
+
+ it 'converts nested objects to Hashes' do
+ expect(issue_hash[:user]).to eq({ login: 'alice' })
+ end
+
+ it 'converts Array values to Hashes' do
+ expect(issue_hash[:assignees]).to eq([{ login: 'alice' }])
+ end
+
+ it 'keeps values as-is if they do not respond to #to_hash' do
+ expect(issue_hash[:number]).to eq(42)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/user_spec.rb b/spec/lib/gitlab/github_import/representation/user_spec.rb
new file mode 100644
index 00000000000..4e63e8ea568
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/user_spec.rb
@@ -0,0 +1,33 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation::User do
+ shared_examples 'a User' do
+ it 'returns an instance of User' do
+ expect(user).to be_an_instance_of(described_class)
+ end
+
+ context 'the returned User' do
+ it 'includes the user ID' do
+ expect(user.id).to eq(42)
+ end
+
+ it 'includes the username' do
+ expect(user.login).to eq('alice')
+ end
+ end
+ end
+
+ describe '.from_api_response' do
+ it_behaves_like 'a User' do
+ let(:response) { double(:response, id: 42, login: 'alice') }
+ let(:user) { described_class.from_api_response(response) }
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a User' do
+ let(:hash) { { 'id' => 42, 'login' => 'alice' } }
+ let(:user) { described_class.from_json_hash(hash) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation_spec.rb b/spec/lib/gitlab/github_import/representation_spec.rb
new file mode 100644
index 00000000000..0b0610817b0
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::Representation do
+ describe '.symbolize_hash' do
+ it 'returns a Hash with the keys as Symbols' do
+ hash = described_class.symbolize_hash('number' => 10)
+
+ expect(hash).to eq({ number: 10 })
+ end
+
+ it 'parses timestamp fields into Time instances' do
+ hash = described_class.symbolize_hash('created_at' => '2017-01-01 12:00')
+
+ expect(hash[:created_at]).to be_an_instance_of(Time)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/sequential_importer_spec.rb b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
new file mode 100644
index 00000000000..6089b0b751f
--- /dev/null
+++ b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::SequentialImporter do
+ describe '#execute' do
+ it 'imports a project in sequence' do
+ repository = double(:repository)
+ project = double(:project, id: 1, repository: repository)
+ importer = described_class.new(project, token: 'foo')
+
+ expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter)
+ .to receive(:execute)
+
+ described_class::SEQUENTIAL_IMPORTERS.each do |klass|
+ instance = double(:instance)
+
+ expect(klass).to receive(:new)
+ .with(project, importer.client)
+ .and_return(instance)
+
+ expect(instance).to receive(:execute)
+ end
+
+ described_class::PARALLEL_IMPORTERS.each do |klass|
+ instance = double(:instance)
+
+ expect(klass).to receive(:new)
+ .with(project, importer.client, parallel: false)
+ .and_return(instance)
+
+ expect(instance).to receive(:execute)
+ end
+
+ expect(repository).to receive(:after_import)
+ expect(importer.execute).to eq(true)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
new file mode 100644
index 00000000000..29f4c00d9c7
--- /dev/null
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -0,0 +1,333 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
+ let(:project) { create(:project) }
+ let(:client) { double(:client) }
+ let(:finder) { described_class.new(project, client) }
+
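+ # Convention assumed throughout these examples: cache lookups such as
+ # #find_from_cache, #cached_id_for_github_id and #read_id_from_cache return
+ # a [found, id] tuple, so a cached negative result ([true, nil]) can be
+ # distinguished from a plain cache miss ([false, nil]).
+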
+ describe '#author_id_for' do
+ it 'returns the user ID for the author of an object' do
+ user = double(:user, id: 4, login: 'kittens')
+ note = double(:note, author: user)
+
+ expect(finder).to receive(:user_id_for).with(user).and_return(42)
+
+ expect(finder.author_id_for(note)).to eq([42, true])
+ end
+
+ it 'returns the ID of the project creator if no user ID could be found' do
+ user = double(:user, id: 4, login: 'kittens')
+ note = double(:note, author: user)
+
+ expect(finder).to receive(:user_id_for).with(user).and_return(nil)
+
+ expect(finder.author_id_for(note)).to eq([project.creator_id, false])
+ end
+
+ it 'returns the ID of the ghost user when the object has no user' do
+ note = double(:note, author: nil)
+
+ expect(finder.author_id_for(note)).to eq([User.ghost.id, true])
+ end
+
+ it 'returns the ID of the ghost user when the given object is nil' do
+ expect(finder.author_id_for(nil)).to eq([User.ghost.id, true])
+ end
+ end
+
+ describe '#assignee_id_for' do
+ it 'returns the user ID for the assignee of an issuable' do
+ user = double(:user, id: 4, login: 'kittens')
+ issue = double(:issue, assignee: user)
+
+ expect(finder).to receive(:user_id_for).with(user).and_return(42)
+ expect(finder.assignee_id_for(issue)).to eq(42)
+ end
+
+ it 'returns nil if the issuable does not have an assignee' do
+ issue = double(:issue, assignee: nil)
+
+ expect(finder).not_to receive(:user_id_for)
+ expect(finder.assignee_id_for(issue)).to be_nil
+ end
+ end
+
+ describe '#user_id_for' do
+ it 'returns the user ID for the given user' do
+ user = double(:user, id: 4, login: 'kittens')
+
+ expect(finder).to receive(:find).with(user.id, user.login).and_return(42)
+ expect(finder.user_id_for(user)).to eq(42)
+ end
+ end
+
+ describe '#find' do
+ let(:user) { create(:user) }
+
+ before do
+ allow(finder).to receive(:email_for_github_username)
+ .and_return(user.email)
+ end
+
+ context 'without a cache' do
+ before do
+ allow(finder).to receive(:find_from_cache).and_return([false, nil])
+ expect(finder).to receive(:find_id_from_database).and_call_original
+ end
+
+ it 'finds a GitLab user for a GitHub user ID' do
+ user.identities.create!(provider: :github, extern_uid: 42)
+
+ expect(finder.find(42, user.username)).to eq(user.id)
+ end
+
+ it 'finds a GitLab user for a GitHub Email address' do
+ expect(finder.find(42, user.username)).to eq(user.id)
+ end
+ end
+
+ context 'with a cache' do
+ it 'returns the cached user ID' do
+ expect(finder).to receive(:find_from_cache).and_return([true, user.id])
+ expect(finder).not_to receive(:find_id_from_database)
+
+ expect(finder.find(42, user.username)).to eq(user.id)
+ end
+
+ it 'does not query the database if the cache key exists but is empty' do
+ expect(finder).to receive(:find_from_cache).and_return([true, nil])
+ expect(finder).not_to receive(:find_id_from_database)
+
+ expect(finder.find(42, user.username)).to be_nil
+ end
+ end
+ end
+
+ describe '#find_from_cache' do
+ it 'retrieves a GitLab user ID for a GitHub user ID' do
+ expect(finder)
+ .to receive(:cached_id_for_github_id)
+ .with(42)
+ .and_return([true, 4])
+
+ expect(finder.find_from_cache(42)).to eq([true, 4])
+ end
+
+ it 'retrieves a GitLab user ID for a GitHub Email address' do
+ email = 'kittens@example.com'
+
+ expect(finder)
+ .to receive(:cached_id_for_github_id)
+ .with(42)
+ .and_return([false, nil])
+
+ expect(finder)
+ .to receive(:cached_id_for_github_email)
+ .with(email)
+ .and_return([true, 4])
+
+ expect(finder.find_from_cache(42, email)).to eq([true, 4])
+ end
+
+ it 'does not query the cache for an Email address when none is given' do
+ expect(finder)
+ .to receive(:cached_id_for_github_id)
+ .with(42)
+ .and_return([false, nil])
+
+ expect(finder).not_to receive(:cached_id_for_github_email)
+
+ expect(finder.find_from_cache(42)).to eq([false])
+ end
+ end
+
+ describe '#find_id_from_database' do
+ let(:user) { create(:user) }
+
+ it 'returns the GitLab user ID for a GitHub user ID' do
+ user.identities.create!(provider: :github, extern_uid: 42)
+
+ expect(finder.find_id_from_database(42, user.email)).to eq(user.id)
+ end
+
+ it 'returns the GitLab user ID for a GitHub Email address' do
+ expect(finder.find_id_from_database(42, user.email)).to eq(user.id)
+ end
+ end
+
+ describe '#email_for_github_username' do
+ let(:email) { 'kittens@example.com' }
+
+ context 'when an Email address is cached' do
+ it 'reads the Email address from the cache' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:read)
+ .and_return(email)
+
+ expect(client).not_to receive(:user)
+ expect(finder.email_for_github_username('kittens')).to eq(email)
+ end
+ end
+
+ context 'when an Email address is not cached' do
+ let(:user) { double(:user, email: email) }
+
+ it 'retrieves the Email address from the GitHub API' do
+ expect(client).to receive(:user).with('kittens').and_return(user)
+ expect(finder.email_for_github_username('kittens')).to eq(email)
+ end
+
+ it 'caches the Email address when an Email address is available' do
+ expect(client).to receive(:user).with('kittens').and_return(user)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(an_instance_of(String), email)
+
+ finder.email_for_github_username('kittens')
+ end
+
+ it 'returns nil if the user does not exist' do
+ expect(client)
+ .to receive(:user)
+ .with('kittens')
+ .and_return(nil)
+
+ expect(Gitlab::GithubImport::Caching)
+ .not_to receive(:write)
+
+ expect(finder.email_for_github_username('kittens')).to be_nil
+ end
+ end
+ end
+
+ describe '#cached_id_for_github_id' do
+ let(:id) { 4 }
+
+ it 'reads a user ID from the cache' do
+ Gitlab::GithubImport::Caching
+ .write(described_class::ID_CACHE_KEY % id, 4)
+
+ expect(finder.cached_id_for_github_id(id)).to eq([true, 4])
+ end
+
+ it 'reads a non-existing cache key' do
+ expect(finder.cached_id_for_github_id(id)).to eq([false, nil])
+ end
+ end
+
+ describe '#cached_id_for_github_email' do
+ let(:email) { 'kittens@example.com' }
+
+ it 'reads a user ID from the cache' do
+ Gitlab::GithubImport::Caching
+ .write(described_class::ID_FOR_EMAIL_CACHE_KEY % email, 4)
+
+ expect(finder.cached_id_for_github_email(email)).to eq([true, 4])
+ end
+
+ it 'reads a non-existing cache key' do
+ expect(finder.cached_id_for_github_email(email)).to eq([false, nil])
+ end
+ end
+
+ describe '#id_for_github_id' do
+ let(:id) { 4 }
+
+ it 'queries and caches the user ID for a given GitHub ID' do
+ expect(finder).to receive(:query_id_for_github_id)
+ .with(id)
+ .and_return(42)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_CACHE_KEY % id, 42)
+
+ finder.id_for_github_id(id)
+ end
+
+ it 'caches a nil value if no ID could be found' do
+ expect(finder).to receive(:query_id_for_github_id)
+ .with(id)
+ .and_return(nil)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_CACHE_KEY % id, nil)
+
+ finder.id_for_github_id(id)
+ end
+ end
+
+ describe '#id_for_github_email' do
+ let(:email) { 'kittens@example.com' }
+
+ it 'queries and caches the user ID for a given Email address' do
+ expect(finder).to receive(:query_id_for_github_email)
+ .with(email)
+ .and_return(42)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_FOR_EMAIL_CACHE_KEY % email, 42)
+
+ finder.id_for_github_email(email)
+ end
+
+ it 'caches a nil value if no ID could be found' do
+ expect(finder).to receive(:query_id_for_github_email)
+ .with(email)
+ .and_return(nil)
+
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .with(described_class::ID_FOR_EMAIL_CACHE_KEY % email, nil)
+
+ finder.id_for_github_email(email)
+ end
+ end
+
+ describe '#query_id_for_github_id' do
+ it 'returns the ID of the user for the given GitHub user ID' do
+ user = create(:user)
+
+ user.identities.create!(provider: :github, extern_uid: '42')
+
+ expect(finder.query_id_for_github_id(42)).to eq(user.id)
+ end
+
+ it 'returns nil when no user ID could be found' do
+ expect(finder.query_id_for_github_id(42)).to be_nil
+ end
+ end
+
+ describe '#query_id_for_github_email' do
+ it 'returns the ID of the user for the given Email address' do
+ user = create(:user, email: 'kittens@example.com')
+
+ expect(finder.query_id_for_github_email(user.email)).to eq(user.id)
+ end
+
+ it 'returns nil if no user ID could be found' do
+ expect(finder.query_id_for_github_email('kittens@example.com')).to be_nil
+ end
+ end
+
+ describe '#read_id_from_cache' do
+ it 'reads an ID from the cache' do
+ Gitlab::GithubImport::Caching.write('foo', 10)
+
+ expect(finder.read_id_from_cache('foo')).to eq([true, 10])
+ end
+
+ it 'reads a cache key with an empty value' do
+ Gitlab::GithubImport::Caching.write('foo', nil)
+
+ expect(finder.read_id_from_cache('foo')).to eq([true, nil])
+ end
+
+ it 'reads a cache key that does not exist' do
+ expect(finder.read_id_from_cache('foo')).to eq([false, nil])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
new file mode 100644
index 00000000000..51414800e8c
--- /dev/null
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe Gitlab::GithubImport do
+ let(:project) { double(:project) }
+
+ describe '.new_client_for' do
+ it 'returns a new Client with a custom token' do
+ expect(described_class::Client)
+ .to receive(:new)
+ .with('123', parallel: true)
+
+ described_class.new_client_for(project, token: '123')
+ end
+
+ it 'returns a new Client with a token stored in the import data' do
+ import_data = double(:import_data, credentials: { user: '123' })
+
+ expect(project)
+ .to receive(:import_data)
+ .and_return(import_data)
+
+ expect(described_class::Client)
+ .to receive(:new)
+ .with('123', parallel: true)
+
+ described_class.new_client_for(project)
+ end
+ end
+
+ describe '.insert_and_return_id' do
+ let(:attributes) { { iid: 1, title: 'foo' } }
+ let(:project) { create(:project) }
+
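+ # The two contexts below reflect the database-specific behaviour assumed
+ # here: PostgreSQL returns the new row IDs from bulk_insert(return_ids: true),
+ # while MySQL returns an empty array and the ID is looked up again
+ # (in this example via the issue IID).
+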
+ context 'on PostgreSQL' do
+ it 'returns the ID returned by the query' do
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(Issue.table_name, [attributes], return_ids: true)
+ .and_return([10])
+
+ id = described_class.insert_and_return_id(attributes, project.issues)
+
+ expect(id).to eq(10)
+ end
+ end
+
+ context 'on MySQL' do
+ it 'uses a separate query to retrieve the ID' do
+ issue = create(:issue, project: project, iid: attributes[:iid])
+
+ expect(Gitlab::Database)
+ .to receive(:bulk_insert)
+ .with(Issue.table_name, [attributes], return_ids: true)
+ .and_return([])
+
+ id = described_class.insert_and_return_id(attributes, project.issues)
+
+ expect(id).to eq(issue.id)
+ end
+ end
+ end
+
+ describe '.ghost_user_id', :clean_gitlab_redis_cache do
+ it 'returns the ID of the ghost user' do
+ expect(described_class.ghost_user_id).to eq(User.ghost.id)
+ end
+
+ it 'caches the ghost user ID' do
+ expect(Gitlab::GithubImport::Caching)
+ .to receive(:write)
+ .once
+ .and_call_original
+
+ 2.times do
+ described_class.ghost_user_id
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/group_hierarchy_spec.rb b/spec/lib/gitlab/group_hierarchy_spec.rb
index 8dc83a6db7f..30686634af4 100644
--- a/spec/lib/gitlab/group_hierarchy_spec.rb
+++ b/spec/lib/gitlab/group_hierarchy_spec.rb
@@ -18,6 +18,12 @@ describe Gitlab::GroupHierarchy, :postgresql do
expect(relation).to include(parent, child1)
end
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(Group.where(id: child2)).base_and_ancestors(upto: child1)
+
+ expect(relation).to contain_exactly(child2)
+ end
+
it 'uses ancestors_base #initialize argument' do
relation = described_class.new(Group.where(id: child2.id), Group.none).base_and_ancestors
@@ -55,6 +61,28 @@ describe Gitlab::GroupHierarchy, :postgresql do
end
end
+ describe '#descendants' do
+ it 'includes only the descendants' do
+ relation = described_class.new(Group.where(id: parent)).descendants
+
+ expect(relation).to contain_exactly(child1, child2)
+ end
+ end
+
+ describe '#ancestors' do
+ it 'includes only the ancestors' do
+ relation = described_class.new(Group.where(id: child2)).ancestors
+
+ expect(relation).to contain_exactly(child1, parent)
+ end
+
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(Group.where(id: child2)).ancestors(upto: child1)
+
+ expect(relation).to be_empty
+ end
+ end
+
describe '#all_groups' do
let(:relation) do
described_class.new(Group.where(id: child1.id)).all_groups
diff --git a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
index 73dd236a5c6..4c1ca4349ea 100644
--- a/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/fs_shards_check_spec.rb
@@ -44,7 +44,7 @@ describe Gitlab::HealthChecks::FsShardsCheck do
describe '#readiness' do
subject { described_class.readiness }
- context 'storage has a tripped circuitbreaker', broken_storage: true do
+ context 'storage has a tripped circuitbreaker', :broken_storage do
let(:repository_storages) { ['broken'] }
let(:storages_paths) do
Gitlab.config.repositories.storages
diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
new file mode 100644
index 00000000000..26529c4759d
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
@@ -0,0 +1,110 @@
+require 'spec_helper'
+
+describe Gitlab::HookData::IssuableBuilder do
+ set(:user) { create(:user) }
+
+ # This shared example requires a `builder` and `user` variable
+ shared_examples 'issuable hook data' do |kind|
+ let(:data) { builder.build(user: user) }
+
+ include_examples 'project hook data' do
+ let(:project) { builder.issuable.project }
+ end
+ include_examples 'deprecated repository hook data'
+
+ context "with a #{kind}" do
+ it 'contains issuable data' do
+ expect(data[:object_kind]).to eq(kind)
+ expect(data[:user]).to eq(user.hook_attrs)
+ expect(data[:project]).to eq(builder.issuable.project.hook_attrs)
+ expect(data[:object_attributes]).to eq(builder.issuable.hook_attrs)
+ expect(data[:changes]).to eq({})
+ expect(data[:repository]).to eq(builder.issuable.project.hook_attrs.slice(:name, :url, :description, :homepage))
+ end
+
+ it 'does not contain certain keys' do
+ expect(data).not_to have_key(:assignees)
+ expect(data).not_to have_key(:assignee)
+ end
+
+ describe 'changes are given' do
+ let(:changes) do
+ {
+ cached_markdown_version: %w[foo bar],
+ description: ['A description', 'A cool description'],
+ description_html: %w[foo bar],
+ in_progress_merge_commit_sha: %w[foo bar],
+ lock_version: %w[foo bar],
+ merge_jid: %w[foo bar],
+ title: ['A title', 'Hello World'],
+ title_html: %w[foo bar],
+ labels: [
+ [{ id: 1, title: 'foo' }],
+ [{ id: 1, title: 'foo' }, { id: 2, title: 'bar' }]
+ ],
+ total_time_spent: [1, 2]
+ }
+ end
+ let(:data) { builder.build(user: user, changes: changes) }
+
+ it 'populates the :changes hash' do
+ expect(data[:changes]).to match(hash_including({
+ title: { previous: 'A title', current: 'Hello World' },
+ description: { previous: 'A description', current: 'A cool description' },
+ labels: {
+ previous: [{ id: 1, title: 'foo' }],
+ current: [{ id: 1, title: 'foo' }, { id: 2, title: 'bar' }]
+ },
+ total_time_spent: {
+ previous: 1,
+ current: 2
+ }
+ }))
+ end
+
+ it 'does not contain certain keys' do
+ expect(data[:changes]).not_to have_key('cached_markdown_version')
+ expect(data[:changes]).not_to have_key('description_html')
+ expect(data[:changes]).not_to have_key('lock_version')
+ expect(data[:changes]).not_to have_key('title_html')
+ expect(data[:changes]).not_to have_key('in_progress_merge_commit_sha')
+ expect(data[:changes]).not_to have_key('merge_jid')
+ end
+ end
+ end
+ end
+
+ describe '#build' do
+ it_behaves_like 'issuable hook data', 'issue' do
+ let(:issuable) { create(:issue, description: 'A description') }
+ let(:builder) { described_class.new(issuable) }
+ end
+
+ it_behaves_like 'issuable hook data', 'merge_request' do
+ let(:issuable) { create(:merge_request, description: 'A description') }
+ let(:builder) { described_class.new(issuable) }
+ end
+
+ context 'issue is assigned' do
+ let(:issue) { create(:issue, assignees: [user]) }
+ let(:data) { described_class.new(issue).build(user: user) }
+
+ it 'returns correct hook data' do
+ expect(data[:object_attributes]['assignee_id']).to eq(user.id)
+ expect(data[:assignees].first).to eq(user.hook_attrs)
+ expect(data).not_to have_key(:assignee)
+ end
+ end
+
+ context 'merge_request is assigned' do
+ let(:merge_request) { create(:merge_request, assignee: user) }
+ let(:data) { described_class.new(merge_request).build(user: user) }
+
+ it 'returns correct hook data' do
+ expect(data[:object_attributes]['assignee_id']).to eq(user.id)
+ expect(data[:assignee]).to eq(user.hook_attrs)
+ expect(data).not_to have_key(:assignees)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
new file mode 100644
index 00000000000..aeacd577d18
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe Gitlab::HookData::IssueBuilder do
+ set(:issue) { create(:issue) }
+ let(:builder) { described_class.new(issue) }
+
+ describe '#build' do
+ let(:data) { builder.build }
+
+ it 'includes safe attributes' do
+ %w[
+ assignee_id
+ author_id
+ closed_at
+ confidential
+ created_at
+ deleted_at
+ description
+ due_date
+ id
+ iid
+ last_edited_at
+ last_edited_by_id
+ milestone_id
+ moved_to_id
+ project_id
+ relative_position
+ state
+ time_estimate
+ title
+ updated_at
+ updated_by_id
+ ].each do |key|
+ expect(data).to include(key)
+ end
+ end
+
+ it 'includes additional attrs' do
+ expect(data).to include(:total_time_spent)
+ expect(data).to include(:human_time_estimate)
+ expect(data).to include(:human_total_time_spent)
+ expect(data).to include(:assignee_ids)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
new file mode 100644
index 00000000000..78475403f9e
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Gitlab::HookData::MergeRequestBuilder do
+ set(:merge_request) { create(:merge_request) }
+ let(:builder) { described_class.new(merge_request) }
+
+ describe '#build' do
+ let(:data) { builder.build }
+
+ it 'includes safe attributes' do
+ %w[
+ assignee_id
+ author_id
+ created_at
+ deleted_at
+ description
+ head_pipeline_id
+ id
+ iid
+ last_edited_at
+ last_edited_by_id
+ merge_commit_sha
+ merge_error
+ merge_params
+ merge_status
+ merge_user_id
+ merge_when_pipeline_succeeds
+ milestone_id
+ source_branch
+ source_project_id
+ state
+ target_branch
+ target_project_id
+ time_estimate
+ title
+ updated_at
+ updated_by_id
+ ].each do |key|
+ expect(data).to include(key)
+ end
+ end
+
+ %i[source target].each do |key|
+ describe "#{key} key" do
+ include_examples 'project hook data', project_key: key do
+ let(:project) { merge_request.public_send("#{key}_project") }
+ end
+ end
+ end
+
+ it 'includes additional attrs' do
+ expect(data).to include(:source)
+ expect(data).to include(:target)
+ expect(data).to include(:last_commit)
+ expect(data).to include(:work_in_progress)
+ expect(data).to include(:total_time_spent)
+ expect(data).to include(:human_time_estimate)
+ expect(data).to include(:human_total_time_spent)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 29baa70d5ae..0ecb50f7110 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -88,6 +88,7 @@ merge_requests:
- metrics
- timelogs
- head_pipeline
+- latest_merge_request_diff
merge_request_diff:
- merge_request
- merge_request_diff_commits
@@ -147,10 +148,6 @@ deploy_keys:
- user
- deploy_keys_projects
- projects
-cluster:
-- project
-- user
-- service
services:
- project
- service_hook
@@ -182,6 +179,8 @@ project:
- tags
- chat_services
- cluster
+- clusters
+- cluster_project
- creator
- group
- namespace
@@ -195,6 +194,7 @@ project:
- mattermost_slash_commands_service
- slack_slash_commands_service
- irker_service
+- packagist_service
- pivotaltracker_service
- prometheus_service
- hipchat_service
@@ -275,6 +275,7 @@ project:
- root_of_fork_network
- fork_network_member
- fork_network
+- custom_attributes
award_emoji:
- awardable
- user
@@ -286,3 +287,6 @@ timelogs:
- user
push_event_payload:
- event
+issue_assignees:
+- issue
+- assignee
diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb
index dd0ce0dae41..cfb15ee7e8b 100644
--- a/spec/lib/gitlab/import_export/fork_spec.rb
+++ b/spec/lib/gitlab/import_export/fork_spec.rb
@@ -46,7 +46,7 @@ describe 'forked project import' do
end
it 'can access the MR' do
- project.merge_requests.first.ensure_ref_fetched
+ project.merge_requests.first.fetch_ref!
expect(project.repository.ref_exists?('refs/merge-requests/1/head')).to be_truthy
end
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index 473ba40fae7..b793636c4d6 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::MergeRequestParser do
let(:parsed_merge_request) do
described_class.new(project,
- merge_request.diff_head_sha,
+ 'abcd',
merge_request,
merge_request.as_json).parse!
end
@@ -29,4 +29,14 @@ describe Gitlab::ImportExport::MergeRequestParser do
it 'has a target branch' do
expect(project.repository.branch_exists?(parsed_merge_request.target_branch)).to be true
end
+
+ it 'parses an MR that has no source branch' do
+ allow_any_instance_of(described_class).to receive(:branch_exists?).and_call_original
+ allow_any_instance_of(described_class).to receive(:branch_exists?).with(merge_request.source_branch).and_return(false)
+ allow_any_instance_of(described_class).to receive(:fork_merge_request?).and_return(true)
+ allow(Gitlab::GitalyClient).to receive(:migrate).and_call_original
+ allow(Gitlab::GitalyClient).to receive(:migrate).with(:fetch_ref).and_return([nil, 0])
+
+ expect(parsed_merge_request).to eq(merge_request)
+ end
end
diff --git a/spec/lib/gitlab/import_export/project.group.json b/spec/lib/gitlab/import_export/project.group.json
new file mode 100644
index 00000000000..82a1fbd2fc5
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project.group.json
@@ -0,0 +1,188 @@
+{
+ "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
+ "visibility_level": 10,
+ "archived": false,
+ "milestones": [
+ {
+ "id": 1,
+ "title": "Project milestone",
+ "project_id": 8,
+ "description": "Project-level milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "group_id": null
+ }
+ ],
+ "labels": [
+ {
+ "id": 2,
+ "title": "project label",
+ "color": "#428bca",
+ "project_id": 8,
+ "created_at": "2016-07-22T08:55:44.161Z",
+ "updated_at": "2016-07-22T08:55:44.161Z",
+ "template": false,
+ "description": "",
+ "type": "ProjectLabel",
+ "priorities": [
+ {
+ "id": 1,
+ "project_id": 5,
+ "label_id": 1,
+ "priority": 1,
+ "created_at": "2016-10-18T09:35:43.338Z",
+ "updated_at": "2016-10-18T09:35:43.338Z"
+ }
+ ]
+ }
+ ],
+ "issues": [
+ {
+ "id": 1,
+ "title": "Fugiat est minima quae maxime non similique.",
+ "assignee_id": null,
+ "project_id": 8,
+ "author_id": 1,
+ "created_at": "2017-07-07T18:13:01.138Z",
+ "updated_at": "2017-08-15T18:37:40.807Z",
+ "branch_name": null,
+ "description": "Quam totam fuga numquam in eveniet.",
+ "state": "opened",
+ "iid": 1,
+ "updated_by_id": 1,
+ "confidential": false,
+ "deleted_at": null,
+ "due_date": null,
+ "moved_to_id": null,
+ "lock_version": null,
+ "time_estimate": 0,
+ "closed_at": null,
+ "last_edited_at": null,
+ "last_edited_by_id": null,
+ "group_milestone_id": null,
+ "milestone": {
+ "id": 1,
+ "title": "Project milestone",
+ "project_id": 8,
+ "description": "Project-level milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "group_id": null
+ },
+ "label_links": [
+ {
+ "id": 11,
+ "label_id": 6,
+ "target_id": 1,
+ "target_type": "Issue",
+ "created_at": "2017-08-15T18:37:40.795Z",
+ "updated_at": "2017-08-15T18:37:40.795Z",
+ "label": {
+ "id": 6,
+ "title": "group label",
+ "color": "#A8D695",
+ "project_id": null,
+ "created_at": "2017-08-15T18:37:19.698Z",
+ "updated_at": "2017-08-15T18:37:19.698Z",
+ "template": false,
+ "description": "",
+ "group_id": 5,
+ "type": "GroupLabel",
+ "priorities": []
+ }
+ },
+ {
+ "id": 11,
+ "label_id": 2,
+ "target_id": 1,
+ "target_type": "Issue",
+ "created_at": "2017-08-15T18:37:40.795Z",
+ "updated_at": "2017-08-15T18:37:40.795Z",
+ "label": {
+ "id": 6,
+ "title": "project label",
+ "color": "#A8D695",
+ "project_id": null,
+ "created_at": "2017-08-15T18:37:19.698Z",
+ "updated_at": "2017-08-15T18:37:19.698Z",
+ "template": false,
+ "description": "",
+ "group_id": 5,
+ "type": "ProjectLabel",
+ "priorities": []
+ }
+ }
+ ]
+ },
+ {
+ "id": 2,
+ "title": "Fugiat est minima quae maxime non similique.",
+ "assignee_id": null,
+ "project_id": 8,
+ "author_id": 1,
+ "created_at": "2017-07-07T18:13:01.138Z",
+ "updated_at": "2017-08-15T18:37:40.807Z",
+ "branch_name": null,
+ "description": "Quam totam fuga numquam in eveniet.",
+ "state": "opened",
+ "iid": 2,
+ "updated_by_id": 1,
+ "confidential": false,
+ "deleted_at": null,
+ "due_date": null,
+ "moved_to_id": null,
+ "lock_version": null,
+ "time_estimate": 0,
+ "closed_at": null,
+ "last_edited_at": null,
+ "last_edited_by_id": null,
+ "group_milestone_id": null,
+ "milestone": {
+ "id": 2,
+ "title": "A group milestone",
+ "description": "Group-level milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "group_id": 100
+ },
+ "label_links": [
+ {
+ "id": 11,
+ "label_id": 2,
+ "target_id": 1,
+ "target_type": "Issue",
+ "created_at": "2017-08-15T18:37:40.795Z",
+ "updated_at": "2017-08-15T18:37:40.795Z",
+ "label": {
+ "id": 2,
+ "title": "project label",
+ "color": "#A8D695",
+ "project_id": null,
+ "created_at": "2017-08-15T18:37:19.698Z",
+ "updated_at": "2017-08-15T18:37:19.698Z",
+ "template": false,
+ "description": "",
+ "group_id": 5,
+ "type": "ProjectLabel",
+ "priorities": []
+ }
+ }
+ ]
+ }
+ ],
+ "snippets": [
+
+ ],
+ "hooks": [
+
+ ]
+}
diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json
index 1115fb218d6..f0752649121 100644
--- a/spec/lib/gitlab/import_export/project.json
+++ b/spec/lib/gitlab/import_export/project.json
@@ -43,7 +43,7 @@
"issues": [
{
"id": 40,
- "title": "Voluptatem amet doloribus deleniti eos maxime repudiandae molestias.",
+ "title": "Voluptatem",
"assignee_id": 1,
"author_id": 22,
"project_id": 5,
@@ -60,6 +60,12 @@
"due_date": null,
"moved_to_id": null,
"test_ee_field": "test",
+ "issue_assignees": [
+ {
+ "user_id": 1,
+ "issue_id": 1
+ }
+ ],
"milestone": {
"id": 1,
"title": "test milestone",
@@ -3140,13 +3146,12 @@
"merge_request_diff": {
"id": 26,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
+ "merge_request_diff_id": 26,
+ "sha": "0b4bc9a49b562e85de7cc9e834518ea6828729b9",
+ "relative_order": 0,
"message": "Feature added\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "ae73cb07c9eeaf35924a10f713b364d32b2dd34f"
- ],
"authored_date": "2014-02-27T09:26:01.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3155,9 +3160,11 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n",
+ "merge_request_diff_id": 26,
+ "relative_order": 0,
+ "utf8_diff": "--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n",
"new_path": "files/ruby/feature.rb",
"old_path": "files/ruby/feature.rb",
"a_mode": "0",
@@ -3419,13 +3426,12 @@
"merge_request_diff": {
"id": 15,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "94b8d581c48d894b86661718582fecbc5e3ed2eb",
+ "merge_request_diff_id": 15,
+ "relative_order": 0,
+ "sha": "94b8d581c48d894b86661718582fecbc5e3ed2eb",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T13:22:56.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -3434,9 +3440,11 @@
"committer_email": "james@jameslopez.es"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 15,
+ "relative_order": 0,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -3698,13 +3706,12 @@
"merge_request_diff": {
"id": 14,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "ddd4ff416a931589c695eb4f5b23f844426f6928",
+ "merge_request_diff_id": 14,
+ "relative_order": 0,
+ "sha": "ddd4ff416a931589c695eb4f5b23f844426f6928",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T14:14:43.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -3713,12 +3720,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 14,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -3727,11 +3732,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 14,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -3740,11 +3744,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 14,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3753,12 +3756,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 14,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3767,11 +3768,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 14,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3780,12 +3780,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 14,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3794,11 +3792,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 14,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3807,11 +3804,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 14,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3820,12 +3816,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 14,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -3834,11 +3828,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 14,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3847,11 +3840,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 14,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -3860,11 +3852,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 14,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -3873,12 +3864,10 @@
"committer_email": "stanhu@packetzoom.com"
},
{
- "id": "e56497bb5f03a90a51293fc6d516788730953899",
+ "merge_request_diff_id": 14,
+ "relative_order": 13,
+ "sha": "e56497bb5f03a90a51293fc6d516788730953899",
"message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "4cd80ccab63c82b4bad16faa5193fbd2aa06df40"
- ],
"authored_date": "2015-01-10T22:23:29.000+01:00",
"author_name": "Sytse Sijbrandij",
"author_email": "sytse@gitlab.com",
@@ -3887,11 +3876,10 @@
"committer_email": "sytse@gitlab.com"
},
{
- "id": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
+ "merge_request_diff_id": 14,
+ "relative_order": 14,
+ "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
"message": "add directory structure for tree_helper spec\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e"
- ],
"authored_date": "2015-01-10T21:28:18.000+01:00",
"author_name": "marmis85",
"author_email": "marmis85@gmail.com",
@@ -3900,11 +3888,10 @@
"committer_email": "marmis85@gmail.com"
},
{
- "id": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
+ "merge_request_diff_id": 14,
+ "relative_order": 15,
+ "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
"message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "570e7b2abdd848b95f2f578043fc23bd6f6fd24d"
- ],
"authored_date": "2014-02-27T10:01:38.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3913,11 +3900,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
+ "merge_request_diff_id": 14,
+ "relative_order": 16,
+ "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
"message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
- ],
"authored_date": "2014-02-27T09:57:31.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3926,11 +3912,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
+ "merge_request_diff_id": 14,
+ "relative_order": 17,
+ "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
"message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "d14d6c0abdd253381df51a723d58691b2ee1ab08"
- ],
"authored_date": "2014-02-27T09:54:21.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3939,11 +3924,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
+ "merge_request_diff_id": 14,
+ "relative_order": 18,
+ "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
"message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "c1acaa58bbcbc3eafe538cb8274ba387047b69f8"
- ],
"authored_date": "2014-02-27T09:49:50.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3952,11 +3936,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
+ "merge_request_diff_id": 14,
+ "relative_order": 19,
+ "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
"message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "ae73cb07c9eeaf35924a10f713b364d32b2dd34f"
- ],
"authored_date": "2014-02-27T09:48:32.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -3965,9 +3948,11 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "Binary files a/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 0,
+ "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
"new_path": ".DS_Store",
"old_path": ".DS_Store",
"a_mode": "100644",
@@ -3978,7 +3963,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 1,
+ "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
"new_path": ".gitignore",
"old_path": ".gitignore",
"a_mode": "100644",
@@ -3989,7 +3976,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 2,
+ "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
"new_path": ".gitmodules",
"old_path": ".gitmodules",
"a_mode": "100644",
@@ -4000,7 +3989,9 @@
"too_large": false
},
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 3,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -4011,7 +4002,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -4022,7 +4015,9 @@
"too_large": false
},
{
- "diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 5,
+ "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
"new_path": "files/.DS_Store",
"old_path": "files/.DS_Store",
"a_mode": "100644",
@@ -4033,7 +4028,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 6,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -4044,7 +4041,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 7,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -4055,7 +4054,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 8,
+ "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
"new_path": "files/ruby/popen.rb",
"old_path": "files/ruby/popen.rb",
"a_mode": "100644",
@@ -4066,7 +4067,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 9,
+ "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
"new_path": "files/ruby/regex.rb",
"old_path": "files/ruby/regex.rb",
"a_mode": "100644",
@@ -4077,7 +4080,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 14,
+ "relative_order": 10,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -4088,7 +4093,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 11,
+ "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
"new_path": "foo/bar/.gitkeep",
"old_path": "foo/bar/.gitkeep",
"a_mode": "0",
@@ -4099,7 +4106,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 12,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
"new_path": "gitlab-grack",
"old_path": "gitlab-grack",
"a_mode": "0",
@@ -4110,7 +4119,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 13,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
"new_path": "gitlab-shell",
"old_path": "gitlab-shell",
"a_mode": "0",
@@ -4121,7 +4132,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 14,
+ "relative_order": 14,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -4209,6 +4222,7 @@
},
"events": [
{
+ "merge_request_diff_id": 14,
"id": 529,
"target_type": "Note",
"target_id": 793,
@@ -4392,13 +4406,12 @@
"merge_request_diff": {
"id": 13,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "0bfedc29d30280c7e8564e19f654584b459e5868",
+ "merge_request_diff_id": 13,
+ "relative_order": 0,
+ "sha": "0bfedc29d30280c7e8564e19f654584b459e5868",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T15:25:23.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -4407,12 +4420,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 13,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -4421,11 +4432,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 13,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -4434,11 +4444,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 13,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4447,12 +4456,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 13,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4461,11 +4468,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 13,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4474,12 +4480,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 13,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4488,11 +4492,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 13,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4501,11 +4504,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 13,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4514,12 +4516,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 13,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4528,11 +4528,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 13,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4541,11 +4540,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 13,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -4554,11 +4552,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 13,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -4567,12 +4564,10 @@
"committer_email": "stanhu@packetzoom.com"
},
{
- "id": "e56497bb5f03a90a51293fc6d516788730953899",
+ "merge_request_diff_id": 13,
+ "relative_order": 13,
+ "sha": "e56497bb5f03a90a51293fc6d516788730953899",
"message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "4cd80ccab63c82b4bad16faa5193fbd2aa06df40"
- ],
"authored_date": "2015-01-10T22:23:29.000+01:00",
"author_name": "Sytse Sijbrandij",
"author_email": "sytse@gitlab.com",
@@ -4581,11 +4576,10 @@
"committer_email": "sytse@gitlab.com"
},
{
- "id": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
+ "merge_request_diff_id": 13,
+ "relative_order": 14,
+ "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
"message": "add directory structure for tree_helper spec\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e"
- ],
"authored_date": "2015-01-10T21:28:18.000+01:00",
"author_name": "marmis85",
"author_email": "marmis85@gmail.com",
@@ -4594,9 +4588,11 @@
"committer_email": "marmis85@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 0,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -4607,7 +4603,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 1,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -4618,7 +4616,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 2,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -4629,7 +4629,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 3,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -4640,7 +4642,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 13,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -4651,7 +4655,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 5,
+ "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
"new_path": "foo/bar/.gitkeep",
"old_path": "foo/bar/.gitkeep",
"a_mode": "0",
@@ -4662,7 +4668,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 13,
+ "relative_order": 6,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -4924,13 +4932,12 @@
"merge_request_diff": {
"id": 12,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "97a0df9696e2aebf10c31b3016f40214e0e8f243",
+ "merge_request_diff_id": 12,
+ "relative_order": 0,
+ "sha": "97a0df9696e2aebf10c31b3016f40214e0e8f243",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T14:08:21.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -4939,12 +4946,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 12,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -4953,11 +4958,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 12,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -4966,11 +4970,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 12,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4979,12 +4982,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 12,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -4993,11 +4994,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 12,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5006,12 +5006,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 12,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5020,11 +5018,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 12,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5033,11 +5030,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 12,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5046,12 +5042,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 12,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5060,11 +5054,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 12,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5073,11 +5066,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 12,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5086,11 +5078,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 12,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -5099,9 +5090,11 @@
"committer_email": "stanhu@packetzoom.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 0,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -5112,7 +5105,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 1,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -5123,7 +5118,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 2,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -5134,7 +5131,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 3,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -5145,7 +5144,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 12,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -5156,7 +5157,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 12,
+ "relative_order": 5,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -5418,9 +5421,9 @@
"merge_request_diff": {
"id": 11,
"state": "empty",
- "st_commits": null,
- "utf8_st_diffs": [
-
+ "merge_request_diff_commits": [
+ ],
+ "merge_request_diff_files": [
],
"merge_request_id": 11,
"created_at": "2016-06-14T15:02:23.772Z",
@@ -5673,13 +5676,12 @@
"merge_request_diff": {
"id": 10,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "f998ac87ac9244f15e9c15109a6f4e62a54b779d",
+ "merge_request_diff_id": 10,
+ "relative_order": 0,
+ "sha": "f998ac87ac9244f15e9c15109a6f4e62a54b779d",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T14:43:23.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -5688,12 +5690,10 @@
"committer_email": "james@jameslopez.es"
},
{
- "id": "be93687618e4b132087f430a4d8fc3a609c9b77c",
+ "merge_request_diff_id": 10,
+ "relative_order": 1,
+ "sha": "be93687618e4b132087f430a4d8fc3a609c9b77c",
"message": "Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849",
- "048721d90c449b244b7b4c53a9186b04330174ec"
- ],
"authored_date": "2015-12-07T12:52:12.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "marin@gitlab.com",
@@ -5702,11 +5702,10 @@
"committer_email": "marin@gitlab.com"
},
{
- "id": "048721d90c449b244b7b4c53a9186b04330174ec",
+ "merge_request_diff_id": 10,
+ "relative_order": 2,
+ "sha": "048721d90c449b244b7b4c53a9186b04330174ec",
"message": "LFS object pointer.\n",
- "parent_ids": [
- "5f923865dde3436854e9ceb9cdb7815618d4e849"
- ],
"authored_date": "2015-12-07T11:54:28.000+01:00",
"author_name": "Marin Jankovski",
"author_email": "maxlazio@gmail.com",
@@ -5715,11 +5714,10 @@
"committer_email": "maxlazio@gmail.com"
},
{
- "id": "5f923865dde3436854e9ceb9cdb7815618d4e849",
+ "merge_request_diff_id": 10,
+ "relative_order": 3,
+ "sha": "5f923865dde3436854e9ceb9cdb7815618d4e849",
"message": "GitLab currently doesn't support patches that involve a merge commit: add a commit here\n",
- "parent_ids": [
- "d2d430676773caa88cdaf7c55944073b2fd5561a"
- ],
"authored_date": "2015-11-13T16:27:12.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5728,12 +5726,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "d2d430676773caa88cdaf7c55944073b2fd5561a",
+ "merge_request_diff_id": 10,
+ "relative_order": 4,
+ "sha": "d2d430676773caa88cdaf7c55944073b2fd5561a",
"message": "Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
- "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73"
- ],
"authored_date": "2015-11-13T08:50:17.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5742,11 +5738,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
+ "merge_request_diff_id": 10,
+ "relative_order": 5,
+ "sha": "2ea1f3dec713d940208fb5ce4a38765ecb5d3f73",
"message": "Add GitLab SVG\n",
- "parent_ids": [
- "59e29889be61e6e0e5e223bfa9ac2721d31605b8"
- ],
"authored_date": "2015-11-13T08:39:43.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5755,12 +5750,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
+ "merge_request_diff_id": 10,
+ "relative_order": 6,
+ "sha": "59e29889be61e6e0e5e223bfa9ac2721d31605b8",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4",
- "parent_ids": [
- "19e2e9b4ef76b422ce1154af39a91323ccc57434",
- "66eceea0db202bb39c4e445e8ca28689645366c5"
- ],
"authored_date": "2015-11-13T07:21:40.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5769,11 +5762,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "66eceea0db202bb39c4e445e8ca28689645366c5",
+ "merge_request_diff_id": 10,
+ "relative_order": 7,
+ "sha": "66eceea0db202bb39c4e445e8ca28689645366c5",
"message": "add spaces in whitespace file\n",
- "parent_ids": [
- "08f22f255f082689c0d7d39d19205085311542bc"
- ],
"authored_date": "2015-11-13T06:01:27.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5782,11 +5774,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "08f22f255f082689c0d7d39d19205085311542bc",
+ "merge_request_diff_id": 10,
+ "relative_order": 8,
+ "sha": "08f22f255f082689c0d7d39d19205085311542bc",
"message": "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n",
- "parent_ids": [
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T06:00:16.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5795,12 +5786,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
+ "merge_request_diff_id": 10,
+ "relative_order": 9,
+ "sha": "19e2e9b4ef76b422ce1154af39a91323ccc57434",
"message": "Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
- "c642fe9b8b9f28f9225d7ea953fe14e74748d53b"
- ],
"authored_date": "2015-11-13T05:23:14.000+01:00",
"author_name": "Stan Hu",
"author_email": "stanhu@gmail.com",
@@ -5809,11 +5798,10 @@
"committer_email": "stanhu@gmail.com"
},
{
- "id": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
+ "merge_request_diff_id": 10,
+ "relative_order": 10,
+ "sha": "c642fe9b8b9f28f9225d7ea953fe14e74748d53b",
"message": "add whitespace in empty\n",
- "parent_ids": [
- "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0"
- ],
"authored_date": "2015-11-13T05:08:45.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5822,11 +5810,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
+ "merge_request_diff_id": 10,
+ "relative_order": 11,
+ "sha": "9a944d90955aaf45f6d0c88f30e27f8d2c41cec0",
"message": "add empty file\n",
- "parent_ids": [
- "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd"
- ],
"authored_date": "2015-11-13T05:08:04.000+01:00",
"author_name": "윤민식",
"author_email": "minsik.yoon@samsung.com",
@@ -5835,11 +5822,10 @@
"committer_email": "minsik.yoon@samsung.com"
},
{
- "id": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
+ "merge_request_diff_id": 10,
+ "relative_order": 12,
+ "sha": "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
"message": "Add ISO-8859 test file\n",
- "parent_ids": [
- "e56497bb5f03a90a51293fc6d516788730953899"
- ],
"authored_date": "2015-08-25T17:53:12.000+02:00",
"author_name": "Stan Hu",
"author_email": "stanhu@packetzoom.com",
@@ -5848,12 +5834,10 @@
"committer_email": "stanhu@packetzoom.com"
},
{
- "id": "e56497bb5f03a90a51293fc6d516788730953899",
+ "merge_request_diff_id": 10,
+ "relative_order": 13,
+ "sha": "e56497bb5f03a90a51293fc6d516788730953899",
"message": "Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/275#note_732774)\n\nSee merge request !2\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
- "4cd80ccab63c82b4bad16faa5193fbd2aa06df40"
- ],
"authored_date": "2015-01-10T22:23:29.000+01:00",
"author_name": "Sytse Sijbrandij",
"author_email": "sytse@gitlab.com",
@@ -5862,11 +5846,10 @@
"committer_email": "sytse@gitlab.com"
},
{
- "id": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
+ "merge_request_diff_id": 10,
+ "relative_order": 14,
+ "sha": "4cd80ccab63c82b4bad16faa5193fbd2aa06df40",
"message": "add directory structure for tree_helper spec\n",
- "parent_ids": [
- "5937ac0a7beb003549fc5fd26fc247adbce4a52e"
- ],
"authored_date": "2015-01-10T21:28:18.000+01:00",
"author_name": "marmis85",
"author_email": "marmis85@gmail.com",
@@ -5875,11 +5858,10 @@
"committer_email": "marmis85@gmail.com"
},
{
- "id": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
+ "merge_request_diff_id": 10,
+ "relative_order": 16,
+ "sha": "5937ac0a7beb003549fc5fd26fc247adbce4a52e",
"message": "Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "570e7b2abdd848b95f2f578043fc23bd6f6fd24d"
- ],
"authored_date": "2014-02-27T10:01:38.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5888,11 +5870,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
+ "merge_request_diff_id": 10,
+ "relative_order": 17,
+ "sha": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
"message": "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
- ],
"authored_date": "2014-02-27T09:57:31.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5901,11 +5882,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
+ "merge_request_diff_id": 10,
+ "relative_order": 18,
+ "sha": "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9",
"message": "More submodules\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "d14d6c0abdd253381df51a723d58691b2ee1ab08"
- ],
"authored_date": "2014-02-27T09:54:21.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5914,11 +5894,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
+ "merge_request_diff_id": 10,
+ "relative_order": 19,
+ "sha": "d14d6c0abdd253381df51a723d58691b2ee1ab08",
"message": "Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "c1acaa58bbcbc3eafe538cb8274ba387047b69f8"
- ],
"authored_date": "2014-02-27T09:49:50.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5927,11 +5906,10 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
},
{
- "id": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
+ "merge_request_diff_id": 10,
+ "relative_order": 20,
+ "sha": "c1acaa58bbcbc3eafe538cb8274ba387047b69f8",
"message": "Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets \u003cdmitriy.zaporozhets@gmail.com\u003e\n",
- "parent_ids": [
- "ae73cb07c9eeaf35924a10f713b364d32b2dd34f"
- ],
"authored_date": "2014-02-27T09:48:32.000+01:00",
"author_name": "Dmitriy Zaporozhets",
"author_email": "dmitriy.zaporozhets@gmail.com",
@@ -5940,9 +5918,11 @@
"committer_email": "dmitriy.zaporozhets@gmail.com"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "Binary files a/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 0,
+ "utf8_diff": "Binary files a/.DS_Store and /dev/null differ\n",
"new_path": ".DS_Store",
"old_path": ".DS_Store",
"a_mode": "100644",
@@ -5953,7 +5933,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 1,
+ "utf8_diff": "--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n",
"new_path": ".gitignore",
"old_path": ".gitignore",
"a_mode": "100644",
@@ -5964,7 +5946,9 @@
"too_large": false
},
{
- "diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 2,
+ "utf8_diff": "--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n",
"new_path": ".gitmodules",
"old_path": ".gitmodules",
"a_mode": "100644",
@@ -5975,7 +5959,9 @@
"too_large": false
},
{
- "diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 3,
+ "utf8_diff": "--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n",
"new_path": "CHANGELOG",
"old_path": "CHANGELOG",
"a_mode": "100644",
@@ -5986,7 +5972,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 4,
+ "utf8_diff": "--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n",
"new_path": "encoding/iso8859.txt",
"old_path": "encoding/iso8859.txt",
"a_mode": "0",
@@ -5997,7 +5985,9 @@
"too_large": false
},
{
- "diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 5,
+ "utf8_diff": "Binary files a/files/.DS_Store and /dev/null differ\n",
"new_path": "files/.DS_Store",
"old_path": "files/.DS_Store",
"a_mode": "100644",
@@ -6008,7 +5998,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath d=\"M37.2266657,17.4468081 
C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 
75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 
61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 6,
+ "utf8_diff": "--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+\u003c?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?\u003e\n+\u003csvg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\"\u003e\n+ \u003c!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch --\u003e\n+ \u003ctitle\u003ewm\u003c/title\u003e\n+ \u003cdesc\u003eCreated with Sketch.\u003c/desc\u003e\n+ \u003cdefs\u003e\n+ \u003cpath id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"\u003e\u003c/path\u003e\n+ \u003c/defs\u003e\n+ \u003cg id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\"\u003e\n+ \u003cpath d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\"\u003e\n+ \u003cg id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\"\u003e\n+ \u003cg id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\n+ \u003cpath d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g16\"\u003e\n+ \u003cg id=\"g18-Clipped\"\u003e\n+ \u003cmask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\"\u003e\n+ \u003cuse xlink:href=\"#path-1\"\u003e\u003c/use\u003e\n+ \u003c/mask\u003e\n+ \u003cg id=\"path22\"\u003e\u003c/g\u003e\n+ \u003cg id=\"g18\" mask=\"url(#mask-2)\"\u003e\n+ \u003cg transform=\"translate(382.736659, 312.879425)\"\u003e\n+ \u003cg id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\"\u003e\n+ \u003cpath d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\"\u003e\n+ \u003cpath 
d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\"\u003e\n+ \u003cpath d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\"\u003e\n+ \u003cpath d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cpath d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cpath d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003cg id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\"\u003e\n+ \u003cpath d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 
C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\"\u003e\n+ \u003cpath d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path54\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\"\u003e\n+ \u003cpath d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cg id=\"path62\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\"\u003e\n+ \u003cpath d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\"\u003e\n+ \u003cg id=\"path70\"\u003e\u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\"\u003e\n+ \u003cpath d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\"\u003e\n+ \u003cpath d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\"\u003e\n+ \u003cpath d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003cg id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\"\u003e\n+ \u003cpath d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 
C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"\u003e\u003c/path\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+ \u003c/g\u003e\n+\u003c/svg\u003e\n\\ No newline at end of file\n",
"new_path": "files/images/wm.svg",
"old_path": "files/images/wm.svg",
"a_mode": "0",
@@ -6019,7 +6011,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 7,
+ "utf8_diff": "--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n",
"new_path": "files/lfs/lfs_object.iso",
"old_path": "files/lfs/lfs_object.iso",
"a_mode": "0",
@@ -6030,7 +6024,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 8,
+ "utf8_diff": "--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" =\u003e path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" =\u003e path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output \u003c\u003c stdout.read\n @cmd_output \u003c\u003c stderr.read\n",
"new_path": "files/ruby/popen.rb",
"old_path": "files/ruby/popen.rb",
"a_mode": "100644",
@@ -6041,7 +6037,9 @@
"too_large": false
},
{
- "diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 9,
+ "utf8_diff": "--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n",
"new_path": "files/ruby/regex.rb",
"old_path": "files/ruby/regex.rb",
"a_mode": "100644",
@@ -6052,7 +6050,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
+ "merge_request_diff_id": 10,
+ "relative_order": 10,
+ "utf8_diff": "--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n",
"new_path": "files/whitespace",
"old_path": "files/whitespace",
"a_mode": "0",
@@ -6063,7 +6063,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 11,
+ "utf8_diff": "--- /dev/null\n+++ b/foo/bar/.gitkeep\n",
"new_path": "foo/bar/.gitkeep",
"old_path": "foo/bar/.gitkeep",
"a_mode": "0",
@@ -6074,7 +6076,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 12,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n",
"new_path": "gitlab-grack",
"old_path": "gitlab-grack",
"a_mode": "0",
@@ -6085,7 +6089,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 13,
+ "utf8_diff": "--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n",
"new_path": "gitlab-shell",
"old_path": "gitlab-shell",
"a_mode": "0",
@@ -6096,7 +6102,9 @@
"too_large": false
},
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 10,
+ "relative_order": 14,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -6358,13 +6366,12 @@
"merge_request_diff": {
"id": 9,
"state": "collected",
- "st_commits": [
+ "merge_request_diff_commits": [
{
- "id": "a4e5dfebf42e34596526acb8611bc7ed80e4eb3f",
+ "merge_request_diff_id": 9,
+ "relative_order": 0,
+ "sha": "a4e5dfebf42e34596526acb8611bc7ed80e4eb3f",
"message": "fixes #10\n",
- "parent_ids": [
- "be93687618e4b132087f430a4d8fc3a609c9b77c"
- ],
"authored_date": "2016-01-19T15:44:02.000+01:00",
"author_name": "James Lopez",
"author_email": "james@jameslopez.es",
@@ -6373,9 +6380,11 @@
"committer_email": "james@jameslopez.es"
}
],
- "utf8_st_diffs": [
+ "merge_request_diff_files": [
{
- "diff": "--- /dev/null\n+++ b/test\n",
+ "merge_request_diff_id": 9,
+ "relative_order": 0,
+ "utf8_diff": "--- /dev/null\n+++ b/test\n",
"new_path": "test",
"old_path": "test",
"a_mode": "0",
@@ -7402,5 +7411,23 @@
"snippets_access_level": 20,
"updated_at": "2016-09-23T11:58:28.000Z",
"wiki_access_level": 20
- }
+ },
+ "custom_attributes": [
+ {
+ "id": 1,
+ "created_at": "2017-10-19T15:36:23.466Z",
+ "updated_at": "2017-10-19T15:36:23.466Z",
+ "project_id": 5,
+ "key": "foo",
+ "value": "foo"
+ },
+ {
+ "id": 2,
+ "created_at": "2017-10-19T15:37:21.904Z",
+ "updated_at": "2017-10-19T15:37:21.904Z",
+ "project_id": 5,
+ "key": "bar",
+ "value": "bar"
+ }
+ ]
}
diff --git a/spec/lib/gitlab/import_export/project.light.json b/spec/lib/gitlab/import_export/project.light.json
index 2d8f3d4a566..02450478a77 100644
--- a/spec/lib/gitlab/import_export/project.light.json
+++ b/spec/lib/gitlab/import_export/project.light.json
@@ -5,9 +5,9 @@
"milestones": [
{
"id": 1,
- "title": "test milestone",
+ "title": "Project milestone",
"project_id": 8,
- "description": "test milestone",
+ "description": "Project-level milestone",
"due_date": null,
"created_at": "2016-06-14T15:02:04.415Z",
"updated_at": "2016-06-14T15:02:04.415Z",
@@ -19,7 +19,7 @@
"labels": [
{
"id": 2,
- "title": "test2",
+ "title": "A project label",
"color": "#428bca",
"project_id": 8,
"created_at": "2016-07-22T08:55:44.161Z",
@@ -63,30 +63,21 @@
"last_edited_at": null,
"last_edited_by_id": null,
"group_milestone_id": null,
+ "milestone": {
+ "id": 1,
+ "title": "Project milestone",
+ "project_id": 8,
+ "description": "Project-level milestone",
+ "due_date": null,
+ "created_at": "2016-06-14T15:02:04.415Z",
+ "updated_at": "2016-06-14T15:02:04.415Z",
+ "state": "active",
+ "iid": 1,
+ "group_id": null
+ },
"label_links": [
{
"id": 11,
- "label_id": 6,
- "target_id": 1,
- "target_type": "Issue",
- "created_at": "2017-08-15T18:37:40.795Z",
- "updated_at": "2017-08-15T18:37:40.795Z",
- "label": {
- "id": 6,
- "title": "group label",
- "color": "#A8D695",
- "project_id": null,
- "created_at": "2017-08-15T18:37:19.698Z",
- "updated_at": "2017-08-15T18:37:19.698Z",
- "template": false,
- "description": "",
- "group_id": 5,
- "type": "GroupLabel",
- "priorities": []
- }
- },
- {
- "id": 11,
"label_id": 2,
"target_id": 1,
"target_type": "Issue",
@@ -94,14 +85,14 @@
"updated_at": "2017-08-15T18:37:40.795Z",
"label": {
"id": 6,
- "title": "project label",
+ "title": "Another project label",
"color": "#A8D695",
"project_id": null,
"created_at": "2017-08-15T18:37:19.698Z",
"updated_at": "2017-08-15T18:37:19.698Z",
"template": false,
"description": "",
- "group_id": 5,
+ "group_id": null,
"type": "ProjectLabel",
"priorities": []
}
@@ -109,10 +100,6 @@
]
}
],
- "snippets": [
-
- ],
- "hooks": [
-
- ]
+ "snippets": [],
+ "hooks": []
}
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index efe11ca794a..0ab3afd0074 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -24,7 +24,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'JSON' do
it 'restores models based on JSON' do
- expect(@restored_project_json).to be true
+ expect(@restored_project_json).to be_truthy
end
it 'restore correct project features' do
@@ -63,6 +63,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
end
+ it 'has issue assignees' do
+ expect(Issue.where(title: 'Voluptatem').first.issue_assignees).not_to be_empty
+ end
+
it 'contains the merge access levels on a protected branch' do
expect(ProtectedBranch.first.merge_access_levels).not_to be_empty
end
@@ -91,26 +95,18 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
- it 'has the correct data for merge request st_diffs' do
- # makes sure we are renaming the custom method +utf8_st_diffs+ into +st_diffs+
- # one MergeRequestDiff uses the new format, where st_diffs is expected to be nil
-
- expect(MergeRequestDiff.where.not(st_diffs: nil).count).to eq(8)
- end
-
it 'has the correct data for merge request diff files' do
- expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(9)
+ expect(MergeRequestDiffFile.where.not(diff: nil).count).to eq(55)
end
- it 'has the correct data for merge request diff commits in serialised and table formats' do
- expect(MergeRequestDiff.where.not(st_commits: nil).count).to eq(7)
- expect(MergeRequestDiffCommit.count).to eq(6)
+ it 'has the correct data for merge request diff commits' do
+ expect(MergeRequestDiffCommit.count).to eq(77)
end
- it 'has the correct time for merge request st_commits' do
- st_commits = MergeRequestDiff.where.not(st_commits: nil).first.st_commits
-
- expect(st_commits.first[:committed_date]).to be_kind_of(Time)
+ it 'has the correct data for merge request latest_merge_request_diff' do
+ MergeRequest.find_each do |merge_request|
+ expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))
+ end
end
it 'has labels associated to label links, associated to issues' do
@@ -129,6 +125,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(@project.project_feature).not_to be_nil
end
+ it 'has custom attributes' do
+ expect(@project.custom_attributes.count).to eq(2)
+ end
+
it 'restores the correct service' do
expect(CustomIssueTrackerService.first).not_to be_nil
end
@@ -147,7 +147,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
it 'has no source if source/target differ' do
- expect(MergeRequest.find_by_title('MR2').source_project_id).to eq(-1)
+ expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil
end
end
@@ -182,6 +182,53 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
end
+ shared_examples 'restores project successfully' do
+ it 'correctly restores project' do
+ expect(shared.errors).to be_empty
+ expect(restored_project_json).to be_truthy
+ end
+ end
+
+ shared_examples 'restores project correctly' do |**results|
+ it 'has labels' do
+ expect(project.labels.size).to eq(results.fetch(:labels, 0))
+ end
+
+ it 'has label priorities' do
+ expect(project.labels.first.priorities).not_to be_empty
+ end
+
+ it 'has milestones' do
+ expect(project.milestones.size).to eq(results.fetch(:milestones, 0))
+ end
+
+ it 'has issues' do
+ expect(project.issues.size).to eq(results.fetch(:issues, 0))
+ end
+
+ it 'has issue with group label and project label' do
+ labels = project.issues.first.labels
+
+ expect(labels.where(type: "ProjectLabel").count).to eq(results.fetch(:first_issue_labels, 0))
+ end
+ end
+
+ shared_examples 'restores group correctly' do |**results|
+ it 'has group label' do
+ expect(project.group.labels.size).to eq(results.fetch(:labels, 0))
+ end
+
+ it 'has group milestone' do
+ expect(project.group.milestones.size).to eq(results.fetch(:milestones, 0))
+ end
+
+ it 'has issue with group label' do
+ labels = project.issues.first.labels
+
+ expect(labels.where(type: "GroupLabel").count).to eq(results.fetch(:first_issue_labels, 0))
+ end
+ end
+
context 'Light JSON' do
let(:user) { create(:user) }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: "", project_path: 'path') }
@@ -190,33 +237,45 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
let(:restored_project_json) { project_tree_restorer.restore }
before do
- project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json")
-
allow(shared).to receive(:export_path).and_return('spec/lib/gitlab/import_export/')
end
- context 'project.json file access check' do
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- setup_symlink(tmpdir, 'project.json')
- allow(shared).to receive(:export_path).and_call_original
+ context 'with a simple project' do
+ before do
+ project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json")
+
+ restored_project_json
+ end
+
+ it_behaves_like 'restores project correctly',
+ issues: 1,
+ labels: 1,
+ milestones: 1,
+ first_issue_labels: 1
- restored_project_json
+ context 'project.json file access check' do
+ it 'does not read a symlink' do
+ Dir.mktmpdir do |tmpdir|
+ setup_symlink(tmpdir, 'project.json')
+ allow(shared).to receive(:export_path).and_call_original
- expect(shared.errors.first).to be_nil
+ restored_project_json
+
+ expect(shared.errors).to be_empty
+ end
end
end
- end
- context 'when there is an existing build with build token' do
- it 'restores project json correctly' do
- create(:ci_build, token: 'abcd')
+ context 'when there is an existing build with build token' do
+ before do
+ create(:ci_build, token: 'abcd')
+ end
- expect(restored_project_json).to be true
+ it_behaves_like 'restores project successfully'
end
end
- context 'with group' do
+ context 'with a project that has a group' do
let!(:project) do
create(:project,
:builds_disabled,
@@ -227,43 +286,22 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end
before do
- project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json")
+ project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.group.json")
restored_project_json
end
- it 'correctly restores project' do
- expect(restored_project_json).to be_truthy
- expect(shared.errors).to be_empty
- end
-
- it 'has labels' do
- expect(project.labels.count).to eq(2)
- end
-
- it 'creates group label' do
- expect(project.group.labels.count).to eq(1)
- end
-
- it 'has label priorities' do
- expect(project.labels.first.priorities).not_to be_empty
- end
-
- it 'has milestones' do
- expect(project.milestones.count).to eq(1)
- end
+ it_behaves_like 'restores project successfully'
+ it_behaves_like 'restores project correctly',
+ issues: 2,
+ labels: 1,
+ milestones: 1,
+ first_issue_labels: 1
- it 'has issue' do
- expect(project.issues.count).to eq(1)
- expect(project.issues.first.labels.count).to eq(2)
- end
-
- it 'has issue with group label and project label' do
- labels = project.issues.first.labels
-
- expect(labels.where(type: "GroupLabel").count).to eq(1)
- expect(labels.where(type: "ProjectLabel").count).to eq(1)
- end
+ it_behaves_like 'restores group correctly',
+ labels: 1,
+ milestones: 1,
+ first_issue_labels: 1
end
end
end
diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
index d9b86e1bf34..6243b6ac9f0 100644
--- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb
@@ -77,6 +77,10 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['issues'].first['notes']).not_to be_empty
end
+ it 'has issue assignees' do
+ expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
+ end
+
it 'has author on issue comments' do
expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
end
@@ -89,10 +93,6 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
end
- it 'has merge requests diff st_diffs' do
- expect(saved_project_json['merge_requests'].first['merge_request_diff']['utf8_st_diffs']).not_to be_nil
- end
-
it 'has merge request diff files' do
expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
end
@@ -164,10 +164,8 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
end
- it 'does not complain about non UTF-8 characters in MR diffs' do
- ActiveRecord::Base.connection.execute("UPDATE merge_request_diffs SET st_diffs = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
-
- expect(project_tree_saver.save).to be true
+ it 'has custom attributes' do
+ expect(saved_project_json['custom_attributes'].count).to eq(2)
end
it 'does not complain about non UTF-8 characters in MR diff files' do
@@ -275,6 +273,9 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
create(:event, :created, target: milestone, project: project, author: user)
create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker')
+ create(:project_custom_attribute, project: project)
+ create(:project_custom_attribute, project: project)
+
project
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 80d92b2e6a3..ec8fa99e0da 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -173,7 +173,6 @@ MergeRequest:
MergeRequestDiff:
- id
- state
-- st_commits
- merge_request_id
- created_at
- updated_at
@@ -313,30 +312,47 @@ Ci::PipelineSchedule:
- deleted_at
- created_at
- updated_at
-Gcp::Cluster:
+Clusters::Cluster:
- id
-- project_id
- user_id
-- service_id
- enabled
+- name
+- provider_type
+- platform_type
+- created_at
+- updated_at
+Clusters::Project:
+- id
+- project_id
+- cluster_id
+- created_at
+- updated_at
+Clusters::Providers::Gcp:
+- id
+- cluster_id
- status
- status_reason
-- project_namespace
+- gcp_project_id
+- zone
+- num_nodes
+- machine_type
+- operation_id
- endpoint
+- encrypted_access_token
+- encrypted_access_token_iv
+- created_at
+- updated_at
+Clusters::Platforms::Kubernetes:
+- id
+- cluster_id
+- api_url
- ca_cert
-- encrypted_kubernetes_token
-- encrypted_kubernetes_token_iv
+- namespace
- username
- encrypted_password
- encrypted_password_iv
-- gcp_project_id
-- gcp_cluster_zone
-- gcp_cluster_name
-- gcp_cluster_size
-- gcp_machine_type
-- gcp_operation_id
-- encrypted_gcp_token
-- encrypted_gcp_token_iv
+- encrypted_token
+- encrypted_token_iv
- created_at
- updated_at
DeployKey:
@@ -496,6 +512,7 @@ Timelog:
- merge_request_id
- issue_id
- user_id
+- spent_at
- created_at
- updated_at
ProjectAutoDevops:
@@ -505,3 +522,13 @@ ProjectAutoDevops:
- project_id
- created_at
- updated_at
+IssueAssignee:
+- user_id
+- issue_id
+ProjectCustomAttribute:
+- id
+- created_at
+- updated_at
+- project_id
+- key
+- value
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
new file mode 100644
index 00000000000..63992ea8ab8
--- /dev/null
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -0,0 +1,55 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::UploadsRestorer do
+ describe 'bundle a project Git repo' do
+ let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
+ let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
+ let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
+
+ before do
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ FileUtils.mkdir_p(File.join(shared.export_path, 'uploads/random'))
+ FileUtils.touch(File.join(shared.export_path, 'uploads/random', "dummy.txt"))
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ describe 'legacy storage' do
+ let(:project) { create(:project) }
+
+ subject(:restorer) { described_class.new(project: project, shared: shared) }
+
+ it 'saves the uploads successfully' do
+ expect(restorer.restore).to be true
+ end
+
+ it 'copies the uploads to the project path' do
+ restorer.restore
+
+ uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('dummy.txt')
+ end
+ end
+
+ describe 'hashed storage' do
+ let(:project) { create(:project, :hashed) }
+
+ subject(:restorer) { described_class.new(project: project, shared: shared) }
+
+ it 'saves the uploads successfully' do
+ expect(restorer.restore).to be true
+ end
+
+ it 'copies the uploads to the project path' do
+ restorer.restore
+
+ uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('dummy.txt')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
new file mode 100644
index 00000000000..e8948de1f3a
--- /dev/null
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Gitlab::ImportExport::UploadsSaver do
+ describe 'bundle a project Git repo' do
+ let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
+ let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
+ let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
+
+ before do
+ allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ describe 'legacy storage' do
+ let(:project) { create(:project) }
+
+ subject(:saver) { described_class.new(shared: shared, project: project) }
+
+ before do
+ UploadService.new(project, file, FileUploader).execute
+ end
+
+ it 'saves the uploads successfully' do
+ expect(saver.save).to be true
+ end
+
+ it 'copies the uploads to the export path' do
+ saver.save
+
+ uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('banana_sample.gif')
+ end
+ end
+
+ describe 'hashed storage' do
+ let(:project) { create(:project, :hashed) }
+
+ subject(:saver) { described_class.new(shared: shared, project: project) }
+
+ before do
+ UploadService.new(project, file, FileUploader).execute
+ end
+
+ it 'saves the uploads successfully' do
+ expect(saver.save).to be true
+ end
+
+ it 'copies the uploads to the export path' do
+ saver.save
+
+ uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+
+ expect(uploads).to include('banana_sample.gif')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index c5725f47453..f2fa315e3ec 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -56,14 +56,14 @@ describe Gitlab::ImportSources do
describe '.importer' do
import_sources = {
- 'github' => Github::Import,
+ 'github' => Gitlab::GithubImport::ParallelImporter,
'bitbucket' => Gitlab::BitbucketImport::Importer,
'gitlab' => Gitlab::GitlabImport::Importer,
'google_code' => Gitlab::GoogleCodeImport::Importer,
'fogbugz' => Gitlab::FogbugzImport::Importer,
'git' => nil,
'gitlab_project' => Gitlab::ImportExport::Importer,
- 'gitea' => Gitlab::GithubImport::Importer
+ 'gitea' => Gitlab::LegacyGithubImport::Importer
}
import_sources.each do |name, klass|
diff --git a/spec/lib/gitlab/issuable_metadata_spec.rb b/spec/lib/gitlab/issuable_metadata_spec.rb
index 2455969a183..42635a68ee1 100644
--- a/spec/lib/gitlab/issuable_metadata_spec.rb
+++ b/spec/lib/gitlab/issuable_metadata_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe Gitlab::IssuableMetadata do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
subject { Class.new { include Gitlab::IssuableMetadata }.new }
@@ -10,6 +10,10 @@ describe Gitlab::IssuableMetadata do
expect(subject.issuable_meta_data(Issue.none, 'Issue')).to eq({})
end
+ it 'raises an error when given a collection with no limit' do
+ expect { subject.issuable_meta_data(Issue.all, 'Issue') }.to raise_error(/must have a limit/)
+ end
+
context 'issues' do
let!(:issue) { create(:issue, author: user, project: project) }
let!(:closed_issue) { create(:issue, state: :closed, author: user, project: project) }
@@ -19,7 +23,7 @@ describe Gitlab::IssuableMetadata do
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do
- data = subject.issuable_meta_data(Issue.all, 'Issue')
+ data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue')
expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1)
@@ -42,7 +46,7 @@ describe Gitlab::IssuableMetadata do
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do
- data = subject.issuable_meta_data(MergeRequest.all, 'MergeRequest')
+ data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest')
expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1)
diff --git a/spec/lib/gitlab/kubernetes/helm_spec.rb b/spec/lib/gitlab/kubernetes/helm_spec.rb
new file mode 100644
index 00000000000..15f99b0401f
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/helm_spec.rb
@@ -0,0 +1,100 @@
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::Helm do
+ let(:client) { double('kubernetes client') }
+ let(:helm) { described_class.new(client) }
+ let(:namespace) { Gitlab::Kubernetes::Namespace.new(described_class::NAMESPACE, client) }
+ let(:install_helm) { true }
+ let(:chart) { 'stable/a_chart' }
+ let(:application_name) { 'app_name' }
+ let(:command) { Gitlab::Kubernetes::Helm::InstallCommand.new(application_name, install_helm, chart) }
+ subject { helm }
+
+ before do
+ allow(Gitlab::Kubernetes::Namespace).to receive(:new).with(described_class::NAMESPACE, client).and_return(namespace)
+ end
+
+ describe '#initialize' do
+ it 'creates a namespace object' do
+ expect(Gitlab::Kubernetes::Namespace).to receive(:new).with(described_class::NAMESPACE, client)
+
+ subject
+ end
+ end
+
+ describe '#install' do
+ before do
+ allow(client).to receive(:create_pod).and_return(nil)
+ allow(namespace).to receive(:ensure_exists!).once
+ end
+
+ it 'ensures the namespace exists before creating the POD' do
+ expect(namespace).to receive(:ensure_exists!).once.ordered
+ expect(client).to receive(:create_pod).once.ordered
+
+ subject.install(command)
+ end
+ end
+
+ describe '#installation_status' do
+ let(:phase) { Gitlab::Kubernetes::Pod::RUNNING }
+ let(:pod) { Kubeclient::Resource.new(status: { phase: phase }) } # partial representation
+
+ it 'fetches POD phase from kubernetes cluster' do
+ expect(client).to receive(:get_pod).with(command.pod_name, described_class::NAMESPACE).once.and_return(pod)
+
+ expect(subject.installation_status(command.pod_name)).to eq(phase)
+ end
+ end
+
+ describe '#installation_log' do
+ let(:log) { 'some output' }
+ let(:response) { RestClient::Response.new(log) }
+
+ it 'fetches POD log from kubernetes cluster' do
+ expect(client).to receive(:get_pod_log).with(command.pod_name, described_class::NAMESPACE).once.and_return(response)
+
+ expect(subject.installation_log(command.pod_name)).to eq(log)
+ end
+ end
+
+ describe '#delete_installation_pod!' do
+ it 'deletes the POD from kubernetes cluster' do
+ expect(client).to receive(:delete_pod).with(command.pod_name, described_class::NAMESPACE).once
+
+ subject.delete_installation_pod!(command.pod_name)
+ end
+ end
+
+ describe '#helm_init_command' do
+ subject { helm.send(:helm_init_command, command) }
+
+ context 'when command.install_helm is true' do
+ let(:install_helm) { true }
+
+ it { is_expected.to eq('helm init >/dev/null') }
+ end
+
+ context 'when command.install_helm is false' do
+ let(:install_helm) { false }
+
+ it { is_expected.to eq('helm init --client-only >/dev/null') }
+ end
+ end
+
+ describe '#helm_install_command' do
+ subject { helm.send(:helm_install_command, command) }
+
+ context 'when command.chart is nil' do
+ let(:chart) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when command.chart is set' do
+ let(:chart) { 'stable/a_chart' }
+
+ it { is_expected.to eq("helm install #{chart} --name #{application_name} --namespace #{namespace.name} >/dev/null")}
+ end
+ end
+end
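
The command strings asserted above pin the behaviour down fairly precisely: helm init with or without --client-only depending on install_helm, and helm install with the chart, release name and namespace, or nil when no chart is given. A minimal sketch of that command generation, with illustrative class and argument names rather than GitLab's actual code:

# Hypothetical command builder reproducing the shell strings expected above.
class HelmCommandSketch
  def initialize(name:, install_helm:, chart:, namespace:)
    @name = name               # release name, e.g. 'app_name'
    @install_helm = install_helm
    @chart = chart             # e.g. 'stable/a_chart', or nil
    @namespace = namespace     # target namespace name
  end

  # Install Tiller only when requested; otherwise initialise client-side only.
  def helm_init_command
    @install_helm ? 'helm init >/dev/null' : 'helm init --client-only >/dev/null'
  end

  # Returns nil when there is nothing to install.
  def helm_install_command
    return unless @chart

    "helm install #{@chart} --name #{@name} --namespace #{@namespace} >/dev/null"
  end
end

# HelmCommandSketch.new(name: 'app_name', install_helm: false,
#                       chart: 'stable/a_chart', namespace: 'a_namespace')
#   .helm_install_command
# # => "helm install stable/a_chart --name app_name --namespace a_namespace >/dev/null"
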
diff --git a/spec/lib/gitlab/kubernetes/namespace_spec.rb b/spec/lib/gitlab/kubernetes/namespace_spec.rb
new file mode 100644
index 00000000000..b3c987f9344
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/namespace_spec.rb
@@ -0,0 +1,66 @@
+require 'spec_helper'
+
+describe Gitlab::Kubernetes::Namespace do
+ let(:name) { 'a_namespace' }
+ let(:client) { double('kubernetes client') }
+ subject { described_class.new(name, client) }
+
+ it { expect(subject.name).to eq(name) }
+
+ describe '#exists?' do
+ context 'when namespace does not exist' do
+ let(:exception) { ::KubeException.new(404, "namespace #{name} not found", nil) }
+
+ it 'returns false' do
+ expect(client).to receive(:get_namespace).with(name).once.and_raise(exception)
+
+ expect(subject.exists?).to be_falsey
+ end
+ end
+
+ context 'when namespace exists' do
+ let(:namespace) { ::Kubeclient::Resource.new(kind: 'Namespace', metadata: { name: name }) } # partial representation
+
+ it 'returns true' do
+ expect(client).to receive(:get_namespace).with(name).once.and_return(namespace)
+
+ expect(subject.exists?).to be_truthy
+ end
+ end
+
+ context 'when cluster cannot be reached' do
+ let(:exception) { Errno::ECONNREFUSED.new }
+
+ it 'raises exception' do
+ expect(client).to receive(:get_namespace).with(name).once.and_raise(exception)
+
+ expect { subject.exists? }.to raise_error(exception)
+ end
+ end
+ end
+
+ describe '#create!' do
+ it 'creates a namespace' do
+ matcher = have_attributes(metadata: have_attributes(name: name))
+ expect(client).to receive(:create_namespace).with(matcher).once
+
+ expect { subject.create! }.not_to raise_error
+ end
+ end
+
+ describe '#ensure_exists!' do
+ it 'checks for existing namespace before creating' do
+ expect(subject).to receive(:exists?).once.ordered.and_return(false)
+ expect(subject).to receive(:create!).once.ordered
+
+ subject.ensure_exists!
+ end
+
+ it 'does not re-create an existing namespace' do
+ expect(subject).to receive(:exists?).once.and_return(true)
+ expect(subject).not_to receive(:create!)
+
+ subject.ensure_exists!
+ end
+ end
+end
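
The examples above describe a narrow contract: a 404 from the cluster means the namespace does not exist, any other failure propagates, and ensure_exists! only creates when the lookup says the namespace is missing. A small sketch of that flow, assuming the kubeclient gem's KubeException and Resource types; the class name is illustrative, not GitLab's implementation:

require 'kubeclient'

# Hypothetical namespace wrapper matching the behaviour asserted above.
class NamespaceSketch
  attr_reader :name

  def initialize(name, client)
    @name = name
    @client = client # a Kubeclient-style client
  end

  def exists?
    @client.get_namespace(name)
    true
  rescue KubeException => e
    raise e unless e.error_code == 404 # only "not found" means "does not exist"

    false
  end

  def create!
    resource = Kubeclient::Resource.new(metadata: { name: name })
    @client.create_namespace(resource)
  end

  def ensure_exists!
    exists? || create!
  end
end
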
diff --git a/spec/lib/gitlab/ldap/authentication_spec.rb b/spec/lib/gitlab/ldap/authentication_spec.rb
index 01b6282af0c..9d57a46c12b 100644
--- a/spec/lib/gitlab/ldap/authentication_spec.rb
+++ b/spec/lib/gitlab/ldap/authentication_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe Gitlab::LDAP::Authentication do
- let(:user) { create(:omniauth_user, extern_uid: dn) }
- let(:dn) { 'uid=john,ou=people,dc=example,dc=com' }
+ let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' }
+ let(:user) { create(:omniauth_user, extern_uid: Gitlab::LDAP::Person.normalize_dn(dn)) }
let(:login) { 'john' }
let(:password) { 'password' }
diff --git a/spec/lib/gitlab/ldap/user_spec.rb b/spec/lib/gitlab/ldap/user_spec.rb
index 9a4705d1cee..260df6e4dae 100644
--- a/spec/lib/gitlab/ldap/user_spec.rb
+++ b/spec/lib/gitlab/ldap/user_spec.rb
@@ -44,23 +44,25 @@ describe Gitlab::LDAP::User do
end
describe '.find_by_uid_and_provider' do
+ let(:dn) { 'CN=John Åström, CN=Users, DC=Example, DC=com' }
+
it 'retrieves the correct user' do
special_info = {
name: 'John Åström',
email: 'john@example.com',
nickname: 'jastrom'
}
- special_hash = OmniAuth::AuthHash.new(uid: 'CN=John Åström,CN=Users,DC=Example,DC=com', provider: 'ldapmain', info: special_info)
+ special_hash = OmniAuth::AuthHash.new(uid: dn, provider: 'ldapmain', info: special_info)
special_chars_user = described_class.new(special_hash)
user = special_chars_user.save
- expect(described_class.find_by_uid_and_provider(special_hash.uid, special_hash.provider)).to eq user
+ expect(described_class.find_by_uid_and_provider(dn, 'ldapmain')).to eq user
end
end
describe 'find or create' do
it "finds the user if already existing" do
- create(:omniauth_user, extern_uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain')
+ create(:omniauth_user, extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', provider: 'ldapmain')
expect { ldap_user.save }.not_to change { User.count }
end
diff --git a/spec/lib/gitlab/github_import/branch_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
index 426b43f8b51..48655851140 100644
--- a/spec/lib/gitlab/github_import/branch_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::BranchFormatter do
+describe Gitlab::LegacyGithubImport::BranchFormatter do
let(:project) { create(:project, :repository) }
let(:commit) { create(:commit, project: project) }
let(:repo) { double }
diff --git a/spec/lib/gitlab/legacy_github_import/client_spec.rb b/spec/lib/gitlab/legacy_github_import/client_spec.rb
new file mode 100644
index 00000000000..80b767abce0
--- /dev/null
+++ b/spec/lib/gitlab/legacy_github_import/client_spec.rb
@@ -0,0 +1,97 @@
+require 'spec_helper'
+
+describe Gitlab::LegacyGithubImport::Client do
+ let(:token) { '123456' }
+ let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
+
+ subject(:client) { described_class.new(token) }
+
+ before do
+ allow(Gitlab.config.omniauth).to receive(:providers).and_return([github_provider])
+ end
+
+ it 'converts OAuth2 client options to symbols' do
+ client.client.options.keys.each do |key|
+ expect(key).to be_kind_of(Symbol)
+ end
+ end
+
+ it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
+ expect { client.api }.not_to raise_error
+ end
+
+ context 'when config is missing' do
+ before do
+ allow(Gitlab.config.omniauth).to receive(:providers).and_return([])
+ end
+
+ it 'is still possible to get an Octokit client' do
+ expect { client.api }.not_to raise_error
+ end
+
+ it 'is not possible to get an OAuth2 client' do
+ expect { client.client }.to raise_error(Projects::ImportService::Error)
+ end
+ end
+
+ context 'allows SSL verification to be configurable on the API' do
+ before do
+ github_provider['verify_ssl'] = false
+ end
+
+ it 'uses supplied value' do
+ expect(client.client.options[:connection_opts][:ssl]).to eq({ verify: false })
+ expect(client.api.connection_options[:ssl]).to eq({ verify: false })
+ end
+ end
+
+ describe '#api_endpoint' do
+ context 'when provider does not specify an API endpoint' do
+ it 'uses GitHub root API endpoint' do
+ expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ end
+ end
+
+ context 'when provider specifies a custom API endpoint' do
+ before do
+ github_provider['args']['client_options']['site'] = 'https://github.company.com/'
+ end
+
+ it 'uses the custom API endpoint' do
+ expect(OmniAuth::Strategies::GitHub).not_to receive(:default_options)
+ expect(client.api.api_endpoint).to eq 'https://github.company.com/'
+ end
+ end
+
+ context 'when given a host' do
+ subject(:client) { described_class.new(token, host: 'https://try.gitea.io/') }
+
+ it 'builds an endpoint with the given host and the default API version' do
+ expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ end
+ end
+
+ context 'when given an API version' do
+ subject(:client) { described_class.new(token, api_version: 'v3') }
+
+ it 'does not use the API version without a host' do
+ expect(client.api.api_endpoint).to eq 'https://api.github.com/'
+ end
+ end
+
+ context 'when given a host and version' do
+ subject(:client) { described_class.new(token, host: 'https://try.gitea.io/', api_version: 'v3') }
+
+ it 'builds an endpoint with the given options' do
+ expect(client.api.api_endpoint).to eq 'https://try.gitea.io/api/v3/'
+ end
+ end
+ end
+
+ it 'does not raise error when rate limit is disabled' do
+ stub_request(:get, /api.github.com/)
+ allow(client.api).to receive(:rate_limit!).and_raise(Octokit::NotFound)
+
+ expect { client.issues {} }.not_to raise_error
+ end
+end
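
The endpoint expectations above encode a simple precedence: an explicitly configured host (combined with an API version, 'v3' by default) wins, while an API version on its own is ignored and the stock GitHub endpoint is used. A compact sketch of that rule; the method name and constant are illustrative, not the class's real implementation:

# Hypothetical endpoint resolution matching the examples above.
DEFAULT_GITHUB_API_ENDPOINT = 'https://api.github.com/'.freeze

def api_endpoint(host: nil, api_version: 'v3')
  # Without a host the version is ignored and the default endpoint is used.
  return DEFAULT_GITHUB_API_ENDPOINT unless host

  # 'https://try.gitea.io/' + 'v3' => 'https://try.gitea.io/api/v3/'
  File.join(host, 'api', api_version) + '/'
end
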
diff --git a/spec/lib/gitlab/github_import/comment_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
index 035ac8c7c1f..413654e108c 100644
--- a/spec/lib/gitlab/github_import/comment_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::CommentFormatter do
+describe Gitlab::LegacyGithubImport::CommentFormatter do
let(:client) { double }
let(:project) { create(:project) }
let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
diff --git a/spec/lib/gitlab/github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index d570f34985b..20514486727 100644
--- a/spec/lib/gitlab/github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
-describe Gitlab::GithubImport::Importer do
- shared_examples 'Gitlab::GithubImport::Importer#execute' do
+describe Gitlab::LegacyGithubImport::Importer do
+ shared_examples 'Gitlab::LegacyGithubImport::Importer#execute' do
let(:expected_not_called) { [] }
before do
@@ -35,7 +35,7 @@ describe Gitlab::GithubImport::Importer do
end
end
- shared_examples 'Gitlab::GithubImport::Importer#execute an error occurs' do
+ shared_examples 'Gitlab::LegacyGithubImport::Importer#execute an error occurs' do
before do
allow(project).to receive(:import_data).and_return(double.as_null_object)
@@ -178,7 +178,7 @@ describe Gitlab::GithubImport::Importer do
end
end
- shared_examples 'Gitlab::GithubImport unit-testing' do
+ shared_examples 'Gitlab::LegacyGithubImport unit-testing' do
describe '#clean_up_restored_branches' do
subject { described_class.new(project) }
@@ -188,7 +188,7 @@ describe Gitlab::GithubImport::Importer do
end
context 'when pull request stills open' do
- let(:gh_pull_request) { Gitlab::GithubImport::PullRequestFormatter.new(project, pull_request) }
+ let(:gh_pull_request) { Gitlab::LegacyGithubImport::PullRequestFormatter.new(project, pull_request) }
it 'does not remove branches' do
expect(subject).not_to receive(:remove_branch)
@@ -197,7 +197,7 @@ describe Gitlab::GithubImport::Importer do
end
context 'when pull request is closed' do
- let(:gh_pull_request) { Gitlab::GithubImport::PullRequestFormatter.new(project, closed_pull_request) }
+ let(:gh_pull_request) { Gitlab::LegacyGithubImport::PullRequestFormatter.new(project, closed_pull_request) }
it 'does remove branches' do
expect(subject).to receive(:remove_branch).at_least(2).times
@@ -262,14 +262,14 @@ describe Gitlab::GithubImport::Importer do
let(:repo_root) { 'https://github.com' }
subject { described_class.new(project) }
- it_behaves_like 'Gitlab::GithubImport::Importer#execute'
- it_behaves_like 'Gitlab::GithubImport::Importer#execute an error occurs'
- it_behaves_like 'Gitlab::GithubImport unit-testing'
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute'
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs'
+ it_behaves_like 'Gitlab::LegacyGithubImport unit-testing'
describe '#client' do
it 'instantiates a Client' do
allow(project).to receive(:import_data).and_return(double(credentials: credentials))
- expect(Gitlab::GithubImport::Client).to receive(:new).with(
+ expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
credentials[:user],
{}
)
@@ -288,16 +288,16 @@ describe Gitlab::GithubImport::Importer do
project.update(import_type: 'gitea', import_url: "#{repo_root}/foo/group/project.git")
end
- it_behaves_like 'Gitlab::GithubImport::Importer#execute' do
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute' do
let(:expected_not_called) { [:import_releases] }
end
- it_behaves_like 'Gitlab::GithubImport::Importer#execute an error occurs'
- it_behaves_like 'Gitlab::GithubImport unit-testing'
+ it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs'
+ it_behaves_like 'Gitlab::LegacyGithubImport unit-testing'
describe '#client' do
it 'instantiates a Client' do
allow(project).to receive(:import_data).and_return(double(credentials: credentials))
- expect(Gitlab::GithubImport::Client).to receive(:new).with(
+ expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
credentials[:user],
{ host: "#{repo_root}:443/foo", api_version: 'v1' }
)
diff --git a/spec/lib/gitlab/github_import/issuable_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
index 05294d227bd..3b5d8945344 100644
--- a/spec/lib/gitlab/github_import/issuable_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::IssuableFormatter do
+describe Gitlab::LegacyGithubImport::IssuableFormatter do
let(:raw_data) do
double(number: 42)
end
diff --git a/spec/lib/gitlab/github_import/issue_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
index 0fc56d92aa6..1a4d5dbfb70 100644
--- a/spec/lib/gitlab/github_import/issue_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::IssueFormatter do
+describe Gitlab::LegacyGithubImport::IssueFormatter do
let(:client) { double }
let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
@@ -30,7 +30,7 @@ describe Gitlab::GithubImport::IssueFormatter do
allow(client).to receive(:user).and_return(octocat)
end
- shared_examples 'Gitlab::GithubImport::IssueFormatter#attributes' do
+ shared_examples 'Gitlab::LegacyGithubImport::IssueFormatter#attributes' do
context 'when issue is open' do
let(:raw_data) { double(base_data.merge(state: 'open')) }
@@ -135,7 +135,7 @@ describe Gitlab::GithubImport::IssueFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::IssueFormatter#number' do
+ shared_examples 'Gitlab::LegacyGithubImport::IssueFormatter#number' do
let(:raw_data) { double(base_data.merge(number: 1347)) }
it 'returns issue number' do
@@ -144,8 +144,8 @@ describe Gitlab::GithubImport::IssueFormatter do
end
context 'when importing a GitHub project' do
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#number'
end
context 'when importing a Gitea project' do
@@ -153,8 +153,8 @@ describe Gitlab::GithubImport::IssueFormatter do
project.update(import_type: 'gitea')
end
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::IssueFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#number'
end
describe '#has_comments?' do
diff --git a/spec/lib/gitlab/github_import/label_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
index 83fdd2cc415..0d1d04f1bf6 100644
--- a/spec/lib/gitlab/github_import/label_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::LabelFormatter do
+describe Gitlab::LegacyGithubImport::LabelFormatter do
let(:project) { create(:project) }
let(:raw) { double(name: 'improvements', color: 'e6e6e6') }
diff --git a/spec/lib/gitlab/github_import/milestone_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
index 683fa51b78e..1db4bbb568c 100644
--- a/spec/lib/gitlab/github_import/milestone_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::MilestoneFormatter do
+describe Gitlab::LegacyGithubImport::MilestoneFormatter do
let(:project) { create(:project) }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') }
@@ -19,7 +19,7 @@ describe Gitlab::GithubImport::MilestoneFormatter do
subject(:formatter) { described_class.new(project, raw_data) }
- shared_examples 'Gitlab::GithubImport::MilestoneFormatter#attributes' do
+ shared_examples 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes' do
let(:data) { base_data.merge(iid_attr => 1347) }
context 'when milestone is open' do
@@ -82,7 +82,7 @@ describe Gitlab::GithubImport::MilestoneFormatter do
end
context 'when importing a GitHub project' do
- it_behaves_like 'Gitlab::GithubImport::MilestoneFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes'
end
context 'when importing a Gitea project' do
@@ -91,6 +91,6 @@ describe Gitlab::GithubImport::MilestoneFormatter do
project.update(import_type: 'gitea')
end
- it_behaves_like 'Gitlab::GithubImport::MilestoneFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes'
end
end
diff --git a/spec/lib/gitlab/github_import/project_creator_spec.rb b/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb
index 948e7469a18..737c9a624e0 100644
--- a/spec/lib/gitlab/github_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/project_creator_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::ProjectCreator do
+describe Gitlab::LegacyGithubImport::ProjectCreator do
let(:user) { create(:user) }
let(:namespace) { create(:group, owner: user) }
diff --git a/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
index 2e42f6239b7..267a41e3f32 100644
--- a/spec/lib/gitlab/github_import/pull_request_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::PullRequestFormatter do
+describe Gitlab::LegacyGithubImport::PullRequestFormatter do
let(:client) { double }
let(:project) { create(:project, :repository) }
let(:source_sha) { create(:commit, project: project).id }
@@ -44,7 +44,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
allow(client).to receive(:user).and_return(octocat)
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#attributes' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes' do
context 'when pull request is open' do
let(:raw_data) { double(base_data.merge(state: 'open')) }
@@ -189,7 +189,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#number' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#number' do
let(:raw_data) { double(base_data) }
it 'returns pull request number' do
@@ -197,7 +197,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#source_branch_name' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name' do
context 'when source branch exists' do
let(:raw_data) { double(base_data) }
@@ -231,7 +231,7 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
end
- shared_examples 'Gitlab::GithubImport::PullRequestFormatter#target_branch_name' do
+ shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name' do
context 'when target branch exists' do
let(:raw_data) { double(base_data) }
@@ -250,10 +250,10 @@ describe Gitlab::GithubImport::PullRequestFormatter do
end
context 'when importing a GitHub project' do
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#number'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#source_branch_name'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#target_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name'
end
context 'when importing a Gitea project' do
@@ -261,10 +261,10 @@ describe Gitlab::GithubImport::PullRequestFormatter do
project.update(import_type: 'gitea')
end
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#attributes'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#number'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#source_branch_name'
- it_behaves_like 'Gitlab::GithubImport::PullRequestFormatter#target_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#number'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name'
+ it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name'
end
describe '#valid?' do
diff --git a/spec/lib/gitlab/github_import/release_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
index 926bf725d6a..082e3b36dd0 100644
--- a/spec/lib/gitlab/github_import/release_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::ReleaseFormatter do
+describe Gitlab::LegacyGithubImport::ReleaseFormatter do
let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:octocat) { double(id: 123456, login: 'octocat') }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
diff --git a/spec/lib/gitlab/github_import/user_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
index 98e3a7c28b9..3cd096eb0ad 100644
--- a/spec/lib/gitlab/github_import/user_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::UserFormatter do
+describe Gitlab::LegacyGithubImport::UserFormatter do
let(:client) { double }
let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
diff --git a/spec/lib/gitlab/github_import/wiki_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/wiki_formatter_spec.rb
index fcd90fab547..7723533aee2 100644
--- a/spec/lib/gitlab/github_import/wiki_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/wiki_formatter_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::GithubImport::WikiFormatter do
+describe Gitlab::LegacyGithubImport::WikiFormatter do
let(:project) do
create(:project,
namespace: create(:namespace, path: 'gitlabhq'),
@@ -11,7 +11,7 @@ describe Gitlab::GithubImport::WikiFormatter do
describe '#disk_path' do
it 'appends .wiki to project path' do
- expect(wiki.disk_path).to eq project.disk_path + '.wiki'
+ expect(wiki.disk_path).to eq project.wiki.disk_path
end
end
diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb
new file mode 100644
index 00000000000..17445fe6de5
--- /dev/null
+++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb
@@ -0,0 +1,19 @@
+require 'spec_helper'
+
+describe Gitlab::Metrics::BackgroundTransaction do
+ let(:test_worker_class) { double(:class, name: 'TestWorker') }
+
+ subject { described_class.new(test_worker_class) }
+
+ describe '#action' do
+ it 'returns transaction action name' do
+ expect(subject.action).to eq('TestWorker#perform')
+ end
+ end
+
+ describe '#labels' do
+ it 'returns labels based on class name' do
+ expect(subject.labels).to eq(controller: 'TestWorker', action: 'perform')
+ end
+ end
+end
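
The two examples above fix the naming scheme for background transactions: the worker class name becomes the controller label, the action is always perform, and #action is simply their combination. A sketch of that scheme with an illustrative class name:

# Hypothetical background transaction naming, matching the spec above.
class BackgroundTransactionSketch
  def initialize(worker_class)
    @worker_class = worker_class
  end

  def labels
    { controller: @worker_class.name, action: 'perform' }
  end

  def action
    "#{labels[:controller]}##{labels[:action]}"
  end
end

# BackgroundTransactionSketch.new(TestWorker).action # => "TestWorker#perform"
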
diff --git a/spec/lib/gitlab/metrics/instrumentation_spec.rb b/spec/lib/gitlab/metrics/instrumentation_spec.rb
index 4b19ee19103..977bc250049 100644
--- a/spec/lib/gitlab/metrics/instrumentation_spec.rb
+++ b/spec/lib/gitlab/metrics/instrumentation_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe Gitlab::Metrics::Instrumentation do
- let(:transaction) { Gitlab::Metrics::Transaction.new }
+ let(:env) { {} }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
before do
@dummy = Class.new do
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index a247f03b2da..5341addf911 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe Gitlab::Metrics::MethodCall do
- let(:method_call) { described_class.new('Foo#bar', 'foo') }
+ let(:transaction) { double(:transaction, labels: {}) }
+ let(:method_call) { described_class.new('Foo#bar', :Foo, '#bar', transaction) }
describe '#measure' do
it 'measures the performance of the supplied block' do
@@ -11,6 +12,54 @@ describe Gitlab::Metrics::MethodCall do
expect(method_call.cpu_time).to be_a_kind_of(Numeric)
expect(method_call.call_count).to eq(1)
end
+
+ context 'when measurement is above threshold' do
+ before do
+ allow(method_call).to receive(:above_threshold?).and_return(true)
+ end
+
+ context 'prometheus instrumentation is enabled' do
+ before do
+ Feature.get(:prometheus_metrics_method_instrumentation).enable
+ end
+
+ it 'observes the performance of the supplied block' do
+ expect(described_class.call_duration_histogram)
+ .to receive(:observe)
+ .with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
+
+ method_call.measure { 'foo' }
+ end
+ end
+
+ context 'prometheus instrumentation is disabled' do
+ before do
+ Feature.get(:prometheus_metrics_method_instrumentation).disable
+ end
+
+ it 'does not observe the performance' do
+ expect(described_class.call_duration_histogram)
+ .not_to receive(:observe)
+
+ method_call.measure { 'foo' }
+ end
+ end
+ end
+
+ context 'when measurement is below threshold' do
+ before do
+ allow(method_call).to receive(:above_threshold?).and_return(false)
+
+ Feature.get(:prometheus_metrics_method_instrumentation).enable
+ end
+
+ it 'does not observe the performance' do
+ expect(described_class.call_duration_histogram)
+ .not_to receive(:observe)
+
+ method_call.measure { 'foo' }
+ end
+ end
end
describe '#to_metric' do
@@ -19,7 +68,7 @@ describe Gitlab::Metrics::MethodCall do
metric = method_call.to_metric
expect(metric).to be_an_instance_of(Gitlab::Metrics::Metric)
- expect(metric.series).to eq('foo')
+ expect(metric.series).to eq('rails_method_calls')
expect(metric.values[:duration]).to be_a_kind_of(Numeric)
expect(metric.values[:cpu_duration]).to be_a_kind_of(Numeric)
@@ -30,7 +79,13 @@ describe Gitlab::Metrics::MethodCall do
end
describe '#above_threshold?' do
+ before do
+ allow(Gitlab::Metrics).to receive(:method_call_threshold).and_return(100)
+ end
+
it 'returns false when the total call time is not above the threshold' do
+ expect(method_call).to receive(:real_time).and_return(9)
+
expect(method_call.above_threshold?).to eq(false)
end
diff --git a/spec/lib/gitlab/metrics/rack_middleware_spec.rb b/spec/lib/gitlab/metrics/rack_middleware_spec.rb
index ec415f2bd85..b84387204ee 100644
--- a/spec/lib/gitlab/metrics/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/rack_middleware_spec.rb
@@ -18,34 +18,6 @@ describe Gitlab::Metrics::RackMiddleware do
expect(middleware.call(env)).to eq('yay')
end
- it 'tags a transaction with the name and action of a controller' do
- klass = double(:klass, name: 'TestController', content_type: 'text/html')
- controller = double(:controller, class: klass, action_name: 'show')
-
- env['action_controller.instance'] = controller
-
- allow(app).to receive(:call).with(env)
-
- expect(middleware).to receive(:tag_controller)
- .with(an_instance_of(Gitlab::Metrics::Transaction), env)
-
- middleware.call(env)
- end
-
- it 'tags a transaction with the method and path of the route in the grape endpoint' do
- route = double(:route, request_method: "GET", path: "/:version/projects/:id/archive(.:format)")
- endpoint = double(:endpoint, route: route)
-
- env['api.endpoint'] = endpoint
-
- allow(app).to receive(:call).with(env)
-
- expect(middleware).to receive(:tag_endpoint)
- .with(an_instance_of(Gitlab::Metrics::Transaction), env)
-
- middleware.call(env)
- end
-
it 'tracks any raised exceptions' do
expect(app).to receive(:call).with(env).and_raise(RuntimeError)
@@ -60,7 +32,7 @@ describe Gitlab::Metrics::RackMiddleware do
let(:transaction) { middleware.transaction_from_env(env) }
it 'returns a Transaction' do
- expect(transaction).to be_an_instance_of(Gitlab::Metrics::Transaction)
+ expect(transaction).to be_an_instance_of(Gitlab::Metrics::WebTransaction)
end
it 'stores the request method and URI in the transaction as values' do
@@ -84,58 +56,4 @@ describe Gitlab::Metrics::RackMiddleware do
end
end
end
-
- describe '#tag_controller' do
- let(:transaction) { middleware.transaction_from_env(env) }
- let(:content_type) { 'text/html' }
-
- before do
- klass = double(:klass, name: 'TestController')
- controller = double(:controller, class: klass, action_name: 'show', content_type: content_type)
-
- env['action_controller.instance'] = controller
- end
-
- it 'tags a transaction with the name and action of a controller' do
- middleware.tag_controller(transaction, env)
-
- expect(transaction.action).to eq('TestController#show')
- end
-
- context 'when the response content type is not :html' do
- let(:content_type) { 'application/json' }
-
- it 'appends the mime type to the transaction action' do
- middleware.tag_controller(transaction, env)
-
- expect(transaction.action).to eq('TestController#show.json')
- end
- end
- end
-
- describe '#tag_endpoint' do
- let(:transaction) { middleware.transaction_from_env(env) }
-
- it 'tags a transaction with the method and path of the route in the grape endpount' do
- route = double(:route, request_method: "GET", path: "/:version/projects/:id/archive(.:format)")
- endpoint = double(:endpoint, route: route)
-
- env['api.endpoint'] = endpoint
-
- middleware.tag_endpoint(transaction, env)
-
- expect(transaction.action).to eq('Grape#GET /projects/:id/archive')
- end
-
- it 'does not tag a transaction if route infos are missing' do
- endpoint = double(:endpoint)
- allow(endpoint).to receive(:route).and_raise
-
- env['api.endpoint'] = endpoint
-
- middleware.tag_endpoint(transaction, env)
-
- expect(transaction.action).to be_nil
- end
- end
end
diff --git a/spec/lib/gitlab/metrics/influx_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
index 999a9536d82..667e4747897 100644
--- a/spec/lib/gitlab/metrics/influx_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/influx_sampler_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::Metrics::InfluxSampler do
+describe Gitlab::Metrics::Samplers::InfluxSampler do
let(:sampler) { described_class.new(5) }
after do
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
new file mode 100644
index 00000000000..53699327da1
--- /dev/null
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -0,0 +1,90 @@
+require 'spec_helper'
+
+describe Gitlab::Metrics::Samplers::RubySampler do
+ let(:sampler) { described_class.new(5) }
+
+ after do
+ Allocations.stop if Gitlab::Metrics.mri?
+ end
+
+ describe '#sample' do
+ it 'samples various statistics' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage)
+ expect(Gitlab::Metrics::System).to receive(:file_descriptor_count)
+ expect(sampler).to receive(:sample_objects)
+ expect(sampler).to receive(:sample_gc)
+
+ sampler.sample
+ end
+
+ it 'adds a metric containing the memory usage' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage)
+ .and_return(9000)
+
+ expect(sampler.metrics[:memory_usage]).to receive(:set)
+ .with({}, 9000)
+ .and_call_original
+
+ sampler.sample
+ end
+
+ it 'adds a metric containing the amount of open file descriptors' do
+ expect(Gitlab::Metrics::System).to receive(:file_descriptor_count)
+ .and_return(4)
+
+ expect(sampler.metrics[:file_descriptors]).to receive(:set)
+ .with({}, 4)
+ .and_call_original
+
+ sampler.sample
+ end
+
+ it 'clears any GC profiles' do
+ expect(GC::Profiler).to receive(:clear)
+
+ sampler.sample
+ end
+ end
+
+ describe '#sample_gc' do
+ it 'adds a metric containing garbage collection time statistics' do
+ expect(GC::Profiler).to receive(:total_time).and_return(0.24)
+
+ expect(sampler.metrics[:total_time]).to receive(:set)
+ .with({}, 240)
+ .and_call_original
+
+ sampler.sample
+ end
+
+ it 'adds a metric containing garbage collection statistics' do
+ GC.stat.keys.each do |key|
+ expect(sampler.metrics[key]).to receive(:set).with({}, anything).and_call_original
+ end
+
+ sampler.sample
+ end
+ end
+
+ if Gitlab::Metrics.mri?
+ describe '#sample_objects' do
+ it 'adds a metric containing the amount of allocated objects' do
+ expect(sampler.metrics[:objects_total]).to receive(:set)
+ .with(include(class: anything), be > 0)
+ .at_least(:once)
+ .and_call_original
+
+ sampler.sample
+ end
+
+ it 'ignores classes without a name' do
+ expect(Allocations).to receive(:to_hash).and_return({ Class.new => 4 })
+
+ expect(sampler.metrics[:objects_total]).not_to receive(:set)
+ .with(include(class: 'object_counts'), anything)
+
+ sampler.sample
+ end
+ end
+ end
+end
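
One detail the GC examples above pin down is the unit conversion: GC::Profiler.total_time reports seconds (0.24) while the total_time metric is set in milliseconds (240), and every key of GC.stat is exported as-is. A sketch of that sampling step; the metrics hash interface is assumed:

# Hypothetical GC sampling step matching the expectations above.
# `metrics` is assumed to map names to objects responding to #set(labels, value).
def sample_gc(metrics)
  # GC::Profiler reports seconds; export milliseconds.
  metrics[:total_time].set({}, GC::Profiler.total_time * 1000)

  # Export every raw GC.stat counter under a metric of the same name.
  GC.stat.each do |key, value|
    metrics[key].set({}, value)
  end
end
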
diff --git a/spec/lib/gitlab/metrics/unicorn_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
index dc0d1f2e940..771b633a2b9 100644
--- a/spec/lib/gitlab/metrics/unicorn_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/unicorn_sampler_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe Gitlab::Metrics::UnicornSampler do
+describe Gitlab::Metrics::Samplers::UnicornSampler do
subject { described_class.new(1.second) }
describe '#sample' do
diff --git a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
index b576d7173f5..ae1d8b47fe9 100644
--- a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
@@ -8,8 +8,8 @@ describe Gitlab::Metrics::SidekiqMiddleware do
it 'tracks the transaction' do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
- expect(Gitlab::Metrics::Transaction).to receive(:new)
- .with('TestWorker#perform')
+ expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
+ .with(worker.class)
.and_call_original
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
@@ -23,8 +23,8 @@ describe Gitlab::Metrics::SidekiqMiddleware do
it 'tracks the transaction (for messages without `enqueued_at`)' do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
- expect(Gitlab::Metrics::Transaction).to receive(:new)
- .with('TestWorker#perform')
+ expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
+ .with(worker.class)
.and_call_original
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
diff --git a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
index e7b595405a8..eca75a4fac1 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe Gitlab::Metrics::Subscribers::ActionView do
- let(:transaction) { Gitlab::Metrics::Transaction.new }
+ let(:env) { {} }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
let(:subscriber) { described_class.new }
@@ -29,5 +30,13 @@ describe Gitlab::Metrics::Subscribers::ActionView do
subscriber.render_template(event)
end
+
+ it 'observes view rendering time' do
+ expect(subscriber.send(:metric_view_rendering_duration_seconds))
+ .to receive(:observe)
+ .with({ view: 'app/views/x.html.haml' }, 2.1)
+
+ subscriber.render_template(event)
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index ce6587e993f..9b3698fb4a8 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -1,11 +1,12 @@
require 'spec_helper'
describe Gitlab::Metrics::Subscribers::ActiveRecord do
- let(:transaction) { Gitlab::Metrics::Transaction.new }
+ let(:env) { {} }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
let(:subscriber) { described_class.new }
let(:event) do
- double(:event, duration: 0.2,
+ double(:event, duration: 2,
payload: { sql: 'SELECT * FROM users WHERE id = 10' })
end
@@ -20,16 +21,24 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
describe 'with a current transaction' do
+ it 'observes sql_duration metric' do
+ expect(subscriber).to receive(:current_transaction)
+ .at_least(:once)
+ .and_return(transaction)
+ expect(subscriber.send(:metric_sql_duration_seconds)).to receive(:observe).with({}, 0.002)
+ subscriber.sql(event)
+ end
+
it 'increments the :sql_duration value' do
expect(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
expect(transaction).to receive(:increment)
- .with(:sql_duration, 0.2)
+ .with(:sql_duration, 2, false)
expect(transaction).to receive(:increment)
- .with(:sql_count, 1)
+ .with(:sql_count, 1, false)
subscriber.sql(event)
end
diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
index f04dc8dcc02..58e28592cf9 100644
--- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
@@ -1,15 +1,16 @@
require 'spec_helper'
describe Gitlab::Metrics::Subscribers::RailsCache do
- let(:transaction) { Gitlab::Metrics::Transaction.new }
+ let(:env) { {} }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
let(:subscriber) { described_class.new }
let(:event) { double(:event, duration: 15.2) }
describe '#cache_read' do
it 'increments the cache_read duration' do
- expect(subscriber).to receive(:increment)
- .with(:cache_read, event.duration)
+ expect(subscriber).to receive(:observe)
+ .with(:read, event.duration)
subscriber.cache_read(event)
end
@@ -17,7 +18,7 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
context 'with a transaction' do
before do
allow(subscriber).to receive(:current_transaction)
- .and_return(transaction)
+ .and_return(transaction)
end
context 'with hit event' do
@@ -25,9 +26,9 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_hit count' do
expect(transaction).to receive(:increment)
- .with(:cache_read_hit_count, 1)
+ .with(:cache_read_hit_count, 1, false)
expect(transaction).to receive(:increment)
- .with(any_args).at_least(1) # Other calls
+ .with(any_args).at_least(1) # Other calls
subscriber.cache_read(event)
end
@@ -37,7 +38,7 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
it 'does not increment cache read miss' do
expect(transaction).not_to receive(:increment)
- .with(:cache_read_hit_count, 1)
+ .with(:cache_read_hit_count, 1)
subscriber.cache_read(event)
end
@@ -49,9 +50,15 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_miss count' do
expect(transaction).to receive(:increment)
- .with(:cache_read_miss_count, 1)
+ .with(:cache_read_miss_count, 1, false)
expect(transaction).to receive(:increment)
- .with(any_args).at_least(1) # Other calls
+ .with(any_args).at_least(1) # Other calls
+
+ subscriber.cache_read(event)
+ end
+
+ it 'increments the cache_read_miss total' do
+ expect(subscriber.send(:metric_cache_misses_total)).to receive(:increment).with({})
subscriber.cache_read(event)
end
@@ -61,7 +68,13 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
it 'does not increment cache read miss' do
expect(transaction).not_to receive(:increment)
- .with(:cache_read_miss_count, 1)
+ .with(:cache_read_miss_count, 1)
+
+ subscriber.cache_read(event)
+ end
+
+ it 'does not increment cache_read_miss total' do
+ expect(subscriber.send(:metric_cache_misses_total)).not_to receive(:increment).with({})
subscriber.cache_read(event)
end
@@ -71,27 +84,27 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
end
describe '#cache_write' do
- it 'increments the cache_write duration' do
- expect(subscriber).to receive(:increment)
- .with(:cache_write, event.duration)
+ it 'observes write duration' do
+ expect(subscriber).to receive(:observe)
+ .with(:write, event.duration)
subscriber.cache_write(event)
end
end
describe '#cache_delete' do
- it 'increments the cache_delete duration' do
- expect(subscriber).to receive(:increment)
- .with(:cache_delete, event.duration)
+ it 'observes delete duration' do
+ expect(subscriber).to receive(:observe)
+ .with(:delete, event.duration)
subscriber.cache_delete(event)
end
end
describe '#cache_exist?' do
- it 'increments the cache_exists duration' do
- expect(subscriber).to receive(:increment)
- .with(:cache_exists, event.duration)
+ it 'observes the exists duration' do
+ expect(subscriber).to receive(:observe)
+ .with(:exists, event.duration)
subscriber.cache_exist?(event)
end
@@ -109,12 +122,12 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
context 'with a transaction' do
before do
allow(subscriber).to receive(:current_transaction)
- .and_return(transaction)
+ .and_return(transaction)
end
it 'increments the cache_read_hit count' do
expect(transaction).to receive(:increment)
- .with(:cache_read_hit_count, 1)
+ .with(:cache_read_hit_count, 1)
subscriber.cache_fetch_hit(event)
end
@@ -133,47 +146,61 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
context 'with a transaction' do
before do
allow(subscriber).to receive(:current_transaction)
- .and_return(transaction)
+ .and_return(transaction)
end
it 'increments the cache_fetch_miss count' do
expect(transaction).to receive(:increment)
- .with(:cache_read_miss_count, 1)
+ .with(:cache_read_miss_count, 1)
+
+ subscriber.cache_generate(event)
+ end
+
+ it 'increments the cache_read_miss total' do
+ expect(subscriber.send(:metric_cache_misses_total)).to receive(:increment).with({})
subscriber.cache_generate(event)
end
end
end
- describe '#increment' do
+ describe '#observe' do
context 'without a transaction' do
it 'returns' do
expect(transaction).not_to receive(:increment)
- subscriber.increment(:foo, 15.2)
+ subscriber.observe(:foo, 15.2)
end
end
context 'with a transaction' do
before do
allow(subscriber).to receive(:current_transaction)
- .and_return(transaction)
+ .and_return(transaction)
end
it 'increments the total and specific cache duration' do
expect(transaction).to receive(:increment)
- .with(:cache_duration, event.duration)
+ .with(:cache_duration, event.duration, false)
expect(transaction).to receive(:increment)
- .with(:cache_count, 1)
+ .with(:cache_count, 1, false)
expect(transaction).to receive(:increment)
- .with(:cache_delete_duration, event.duration)
+ .with(:cache_delete_duration, event.duration, false)
expect(transaction).to receive(:increment)
- .with(:cache_delete_count, 1)
+ .with(:cache_delete_count, 1, false)
+
+ subscriber.observe(:delete, event.duration)
+ end
+
+ it 'observes cache metric' do
+ expect(subscriber.send(:metric_cache_operation_duration_seconds))
+ .to receive(:observe)
+ .with(transaction.labels.merge(operation: :delete), event.duration / 1000.0)
- subscriber.increment(:cache_delete, event.duration)
+ subscriber.observe(:delete, event.duration)
end
end
end
diff --git a/spec/lib/gitlab/metrics/transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index 3779af81512..1d162f53a13 100644
--- a/spec/lib/gitlab/metrics/transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
-describe Gitlab::Metrics::Transaction do
- let(:transaction) { described_class.new }
+describe Gitlab::Metrics::WebTransaction do
+ let(:env) { {} }
+ let(:transaction) { described_class.new(env) }
describe '#duration' do
it 'returns the duration of a transaction in seconds' do
@@ -48,7 +49,7 @@ describe Gitlab::Metrics::Transaction do
describe '#method_call_for' do
it 'returns a MethodCall' do
- method = transaction.method_call_for('Foo#bar')
+ method = transaction.method_call_for('Foo#bar', :Foo, '#bar')
expect(method).to be_an_instance_of(Gitlab::Metrics::MethodCall)
end
@@ -85,14 +86,6 @@ describe Gitlab::Metrics::Transaction do
end
end
- describe '#add_tag' do
- it 'adds a tag' do
- transaction.add_tag(:foo, 'bar')
-
- expect(transaction.tags).to eq({ foo: 'bar' })
- end
- end
-
describe '#finish' do
it 'tracks the transaction details and submits them to Sidekiq' do
expect(transaction).to receive(:track_self)
@@ -127,7 +120,7 @@ describe Gitlab::Metrics::Transaction do
end
it 'adds the action as a tag for every metric' do
- transaction.action = 'Foo#bar'
+ allow(transaction).to receive(:labels).and_return(controller: 'Foo', action: 'bar')
transaction.track_self
hash = {
@@ -144,7 +137,8 @@ describe Gitlab::Metrics::Transaction do
end
it 'does not add an action tag for events' do
- transaction.action = 'Foo#bar'
+ allow(transaction).to receive(:labels).and_return(controller: 'Foo', action: 'bar')
+
transaction.add_event(:meow)
hash = {
@@ -161,6 +155,61 @@ describe Gitlab::Metrics::Transaction do
end
end
+ describe '#labels' do
+ context 'when request goes to Grape endpoint' do
+ before do
+ route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
+ endpoint = double(:endpoint, route: route)
+
+ env['api.endpoint'] = endpoint
+ end
+ it 'provides labels with the method and path of the route in the grape endpoint' do
+ expect(transaction.labels).to eq({ controller: 'Grape', action: 'GET /projects/:id/archive' })
+ expect(transaction.action).to eq('Grape#GET /projects/:id/archive')
+ end
+
+ it 'does not provide labels if route information is missing' do
+ endpoint = double(:endpoint)
+ allow(endpoint).to receive(:route).and_raise
+
+ env['api.endpoint'] = endpoint
+
+ expect(transaction.labels).to eq({})
+ expect(transaction.action).to be_nil
+ end
+ end
+
+ context 'when request goes to ActionController' do
+ let(:content_type) { 'text/html' }
+
+ before do
+ klass = double(:klass, name: 'TestController')
+ controller = double(:controller, class: klass, action_name: 'show', content_type: content_type)
+
+ env['action_controller.instance'] = controller
+ end
+
+ it 'tags a transaction with the name and action of a controller' do
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show' })
+ expect(transaction.action).to eq('TestController#show')
+ end
+
+ context 'when the response content type is not :html' do
+ let(:content_type) { 'application/json' }
+
+ it 'appends the mime type to the transaction action' do
+ expect(transaction.labels).to eq({ controller: 'TestController', action: 'show.json' })
+ expect(transaction.action).to eq('TestController#show.json')
+ end
+ end
+ end
+
+ it 'returns no labels when no route information is present in env' do
+ expect(transaction.labels).to eq({})
+ expect(transaction.action).to eq(nil)
+ end
+ end
+
describe '#add_event' do
it 'adds a metric' do
transaction.add_event(:meow)
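
Taken together, the new #labels examples describe a precedence: a Grape endpoint in the Rack env wins, then an ActionController instance (with the MIME suffix appended for non-HTML responses), and an empty hash otherwise. A compact sketch of that derivation with illustrative helper names, not the class's actual code:

# Hypothetical label derivation matching the #labels examples above.
def labels_for(env)
  if (endpoint = env['api.endpoint'])
    route = begin
              endpoint.route
            rescue StandardError
              nil
            end
    return {} unless route

    # '/:version/projects/:id/archive(.:format)' => '/projects/:id/archive'
    path = route.path.sub(%r{\A/:version}, '').sub(/\(\.:format\)\z/, '')
    { controller: 'Grape', action: "#{route.request_method} #{path}" }
  elsif (controller = env['action_controller.instance'])
    action = controller.action_name
    # Non-HTML responses carry the MIME suffix, e.g. 'show.json'.
    action += ".#{controller.content_type.split('/').last}" unless controller.content_type == 'text/html'
    { controller: controller.class.name, action: action }
  else
    {}
  end
end
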
diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb
index 599b8807d8d..1619fbd88b1 100644
--- a/spec/lib/gitlab/metrics_spec.rb
+++ b/spec/lib/gitlab/metrics_spec.rb
@@ -115,7 +115,7 @@ describe Gitlab::Metrics do
end
context 'with a transaction' do
- let(:transaction) { Gitlab::Metrics::Transaction.new }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) }
before do
allow(described_class).to receive(:current_transaction)
@@ -124,13 +124,13 @@ describe Gitlab::Metrics do
it 'adds a metric to the current transaction' do
expect(transaction).to receive(:increment)
- .with('foo_real_time', a_kind_of(Numeric))
+ .with('foo_real_time', a_kind_of(Numeric), false)
expect(transaction).to receive(:increment)
- .with('foo_cpu_time', a_kind_of(Numeric))
+ .with('foo_cpu_time', a_kind_of(Numeric), false)
expect(transaction).to receive(:increment)
- .with('foo_call_count', 1)
+ .with('foo_call_count', 1, false)
described_class.measure(:foo) { 10 }
end
@@ -143,31 +143,6 @@ describe Gitlab::Metrics do
end
end
- describe '.tag_transaction' do
- context 'without a transaction' do
- it 'does nothing' do
- expect_any_instance_of(Gitlab::Metrics::Transaction)
- .not_to receive(:add_tag)
-
- described_class.tag_transaction(:foo, 'bar')
- end
- end
-
- context 'with a transaction' do
- let(:transaction) { Gitlab::Metrics::Transaction.new }
-
- it 'adds the tag to the transaction' do
- expect(described_class).to receive(:current_transaction)
- .and_return(transaction)
-
- expect(transaction).to receive(:add_tag)
- .with(:foo, 'bar')
-
- described_class.tag_transaction(:foo, 'bar')
- end
- end
- end
-
describe '.action=' do
context 'without a transaction' do
it 'does nothing' do
@@ -180,7 +155,7 @@ describe Gitlab::Metrics do
context 'with a transaction' do
it 'sets the action of a transaction' do
- trans = Gitlab::Metrics::Transaction.new
+ trans = Gitlab::Metrics::WebTransaction.new({})
expect(described_class).to receive(:current_transaction)
.and_return(trans)
@@ -210,7 +185,7 @@ describe Gitlab::Metrics do
context 'with a transaction' do
it 'adds an event' do
- transaction = Gitlab::Metrics::Transaction.new
+ transaction = Gitlab::Metrics::WebTransaction.new({})
expect(transaction).to receive(:add_event).with(:meow)
@@ -224,7 +199,7 @@ describe Gitlab::Metrics do
shared_examples 'prometheus metrics API' do
describe '#counter' do
- subject { described_class.counter(:couter, 'doc') }
+ subject { described_class.counter(:counter, 'doc') }
describe '#increment' do
it 'successfully calls #increment without arguments' do
@@ -280,7 +255,7 @@ describe Gitlab::Metrics do
it_behaves_like 'prometheus metrics API'
describe '#null_metric' do
- subject { described_class.provide_metric(:test) }
+ subject { described_class.send(:provide_metric, :test) }
it { is_expected.to be_a(Gitlab::Metrics::NullMetric) }
end
@@ -321,7 +296,7 @@ describe Gitlab::Metrics do
it_behaves_like 'prometheus metrics API'
describe '#null_metric' do
- subject { described_class.provide_metric(:test) }
+ subject { described_class.send(:provide_metric, :test) }
it { is_expected.to be_nil }
end
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index cab662819ac..60a134be939 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -17,89 +17,123 @@ describe Gitlab::Middleware::Go do
describe 'when go-get=1' do
let(:current_user) { nil }
- context 'with simple 2-segment project path' do
- let!(:project) { create(:project, :private) }
+ shared_examples 'go-get=1' do |enabled_protocol:|
+ context 'with simple 2-segment project path' do
+ let!(:project) { create(:project, :private) }
- context 'with subpackages' do
- let(:path) { "#{project.full_path}/subpackage" }
+ context 'with subpackages' do
+ let(:path) { "#{project.full_path}/subpackage" }
- it 'returns the full project path' do
- expect_response_with_path(go, project.full_path)
- end
- end
-
- context 'without subpackages' do
- let(:path) { project.full_path }
-
- it 'returns the full project path' do
- expect_response_with_path(go, project.full_path)
+ it 'returns the full project path' do
+ expect_response_with_path(go, enabled_protocol, project.full_path)
+ end
end
- end
- end
- context 'with a nested project path' do
- let(:group) { create(:group, :nested) }
- let!(:project) { create(:project, :public, namespace: group) }
+ context 'without subpackages' do
+ let(:path) { project.full_path }
- shared_examples 'a nested project' do
- context 'when the project is public' do
it 'returns the full project path' do
- expect_response_with_path(go, project.full_path)
+ expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
+ end
- context 'when the project is private' do
- before do
- project.update_attribute(:visibility_level, Project::PRIVATE)
- end
+ context 'with a nested project path' do
+ let(:group) { create(:group, :nested) }
+ let!(:project) { create(:project, :public, namespace: group) }
- context 'with access to the project' do
- let(:current_user) { project.creator }
+ shared_examples 'a nested project' do
+ context 'when the project is public' do
+ it 'returns the full project path' do
+ expect_response_with_path(go, enabled_protocol, project.full_path)
+ end
+ end
+ context 'when the project is private' do
before do
- project.team.add_master(current_user)
+ project.update_attribute(:visibility_level, Project::PRIVATE)
end
- it 'returns the full project path' do
- expect_response_with_path(go, project.full_path)
+ context 'with access to the project' do
+ let(:current_user) { project.creator }
+
+ before do
+ project.team.add_master(current_user)
+ end
+
+ it 'returns the full project path' do
+ expect_response_with_path(go, enabled_protocol, project.full_path)
+ end
end
- end
- context 'without access to the project' do
- it 'returns the 2-segment group path' do
- expect_response_with_path(go, group.full_path)
+ context 'without access to the project' do
+ it 'returns the 2-segment group path' do
+ expect_response_with_path(go, enabled_protocol, group.full_path)
+ end
end
end
end
- end
- context 'with subpackages' do
- let(:path) { "#{project.full_path}/subpackage" }
+ context 'with subpackages' do
+ let(:path) { "#{project.full_path}/subpackage" }
- it_behaves_like 'a nested project'
+ it_behaves_like 'a nested project'
+ end
+
+ context 'with a subpackage that is not a valid project path' do
+ let(:path) { "#{project.full_path}/---subpackage" }
+
+ it_behaves_like 'a nested project'
+ end
+
+ context 'without subpackages' do
+ let(:path) { project.full_path }
+
+ it_behaves_like 'a nested project'
+ end
end
- context 'with a subpackage that is not a valid project path' do
- let(:path) { "#{project.full_path}/---subpackage" }
+ context 'with a bogus path' do
+ let(:path) { "http:;url=http:&sol;&sol;www.example.com'http-equiv='refresh'x='?go-get=1" }
- it_behaves_like 'a nested project'
+ it 'skips go-import generation' do
+ expect(app).to receive(:call).and_return('no-go')
+
+ go
+ end
+ end
+ end
+
+ context 'with SSH disabled' do
+ before do
+ stub_application_setting(enabled_git_access_protocol: 'http')
end
- context 'without subpackages' do
- let(:path) { project.full_path }
+ include_examples 'go-get=1', enabled_protocol: :http
+ end
- it_behaves_like 'a nested project'
+ context 'with HTTP disabled' do
+ before do
+ stub_application_setting(enabled_git_access_protocol: 'ssh')
end
+
+ include_examples 'go-get=1', enabled_protocol: :ssh
end
- context 'with a bogus path' do
- let(:path) { "http:;url=http:&sol;&sol;www.example.com'http-equiv='refresh'x='?go-get=1" }
+ context 'with nothing disabled' do
+ before do
+ stub_application_setting(enabled_git_access_protocol: nil)
+ end
- it 'skips go-import generation' do
- expect(app).to receive(:call).and_return('no-go')
+ include_examples 'go-get=1', enabled_protocol: nil
+ end
- go
+ context 'with nothing disabled (blank string)' do
+ before do
+ stub_application_setting(enabled_git_access_protocol: '')
end
+
+ include_examples 'go-get=1', enabled_protocol: nil
end
end
@@ -113,10 +147,16 @@ describe Gitlab::Middleware::Go do
middleware.call(env)
end
- def expect_response_with_path(response, path)
+ def expect_response_with_path(response, protocol, path)
+ repository_url = case protocol
+ when :ssh
+ "ssh://git@#{Gitlab.config.gitlab.host}/#{path}.git"
+ when :http, nil
+ "http://#{Gitlab.config.gitlab.host}/#{path}.git"
+ end
expect(response[0]).to eq(200)
expect(response[1]['Content-Type']).to eq('text/html')
- expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git http://#{Gitlab.config.gitlab.host}/#{path}.git" /></head></html>}
+ expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git #{repository_url}" /></head></html>}
expect(response[2].body).to eq([expected_body])
end
end
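
The updated helper above illustrates the idea driving this change: the go-import meta tag should advertise whichever clone URL matches the instance's enabled Git access protocol, falling back to HTTP when nothing is restricted. A minimal sketch of that selection in plain Ruby, using a hypothetical host argument rather than the middleware's real collaborators:

def go_repository_url(host, path, enabled_protocol)
  case enabled_protocol
  when :ssh
    "ssh://git@#{host}/#{path}.git"
  when :http, nil
    # HTTP is also the fallback when no protocol restriction is configured
    "http://#{host}/#{path}.git"
  end
end

def go_import_meta_tag(host, path, enabled_protocol)
  url = go_repository_url(host, path, enabled_protocol)
  %{<meta name="go-import" content="#{host}/#{path} git #{url}" />}
end

go_import_meta_tag('gitlab.example.com', 'group/project', :ssh)
# => '<meta name="go-import" content="gitlab.example.com/group/project git ssh://git@gitlab.example.com/group/project.git" />'
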
diff --git a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
index 88107536c9e..14f2c3cb86f 100644
--- a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
+++ b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
@@ -4,7 +4,7 @@ describe Gitlab::Middleware::RailsQueueDuration do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
let(:env) { {} }
- let(:transaction) { double(:transaction) }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
before do
expect(app).to receive(:call).with(env).and_return('yay')
@@ -30,6 +30,16 @@ describe Gitlab::Middleware::RailsQueueDuration do
expect(transaction).to receive(:set).with(:rails_queue_duration, an_instance_of(Float))
expect(middleware.call(env)).to eq('yay')
end
+
+ it 'observes rails queue duration metrics and calls the app when the header is present' do
+ env['HTTP_GITLAB_WORKHORSE_PROXY_START'] = '2000000000'
+
+ expect(middleware.send(:metric_rails_queue_duration_seconds)).to receive(:observe).with(transaction.labels, 1)
+
+ Timecop.freeze(Time.at(3)) do
+ expect(middleware.call(env)).to eq('yay')
+ end
+ end
end
end
end
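
The new example also pins down the units involved: the GITLAB_WORKHORSE_PROXY_START value behaves like a nanosecond timestamp (2 000 000 000 ns against a clock frozen at 3 s yields exactly 1 s), and the middleware reports the elapsed queue time in seconds. A minimal sketch of that calculation, independent of the middleware class:

def rails_queue_duration_seconds(env, now: Time.now)
  proxy_start = env['HTTP_GITLAB_WORKHORSE_PROXY_START']
  return unless proxy_start

  # proxy start is nanoseconds since the epoch
  now.to_f - (proxy_start.to_f / 1_000_000_000)
end

rails_queue_duration_seconds(
  { 'HTTP_GITLAB_WORKHORSE_PROXY_START' => '2000000000' },
  now: Time.at(3)
)
# => 1.0
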
diff --git a/spec/lib/gitlab/middleware/read_only_spec.rb b/spec/lib/gitlab/middleware/read_only_spec.rb
index 742a792a1af..07ba11b93a3 100644
--- a/spec/lib/gitlab/middleware/read_only_spec.rb
+++ b/spec/lib/gitlab/middleware/read_only_spec.rb
@@ -83,15 +83,24 @@ describe Gitlab::Middleware::ReadOnly do
expect(subject).to disallow_request
end
- context 'whitelisted requests' do
- it 'expects DELETE request to logout to be allowed' do
- response = request.delete('/users/sign_out')
+ it 'expects POST of new file that looks like an LFS batch url to be disallowed' do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
+ response = request.post('/root/gitlab-ce/new/master/app/info/lfs/objects/batch')
- expect(response).not_to be_a_redirect
- expect(subject).not_to disallow_request
- end
+ expect(response).to be_a_redirect
+ expect(subject).to disallow_request
+ end
+ it 'returns last_visited_url for disallowed request' do
+ response = request.post('/test_request')
+
+ expect(response.location).to eq 'http://localhost/'
+ end
+
+ context 'whitelisted requests' do
it 'expects a POST internal request to be allowed' do
+ expect(Rails.application.routes).not_to receive(:recognize_path)
+
response = request.post("/api/#{API::API.version}/internal")
expect(response).not_to be_a_redirect
@@ -99,11 +108,32 @@ describe Gitlab::Middleware::ReadOnly do
end
it 'expects a POST LFS request to batch URL to be allowed' do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
response = request.post('/root/rouge.git/info/lfs/objects/batch')
expect(response).not_to be_a_redirect
expect(subject).not_to disallow_request
end
+
+ it 'expects a POST request to git-upload-pack URL to be allowed' do
+ expect(Rails.application.routes).to receive(:recognize_path).and_call_original
+ response = request.post('/root/rouge.git/git-upload-pack')
+
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+ end
+
+ it 'expects requests to sidekiq admin to be allowed' do
+ response = request.post('/admin/sidekiq')
+
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+
+ response = request.get('/admin/sidekiq')
+
+ expect(response).not_to be_a_redirect
+ expect(subject).not_to disallow_request
+ end
end
end
diff --git a/spec/lib/gitlab/multi_collection_paginator_spec.rb b/spec/lib/gitlab/multi_collection_paginator_spec.rb
new file mode 100644
index 00000000000..68bd4f93159
--- /dev/null
+++ b/spec/lib/gitlab/multi_collection_paginator_spec.rb
@@ -0,0 +1,46 @@
+require 'spec_helper'
+
+describe Gitlab::MultiCollectionPaginator do
+ subject(:paginator) { described_class.new(Project.all.order(:id), Group.all.order(:id), per_page: 3) }
+
+ it 'combines both collections' do
+ project = create(:project)
+ group = create(:group)
+
+ expect(paginator.paginate(1)).to eq([project, group])
+ end
+
+ it 'includes elements of the second collection if the first collection is empty' do
+ group = create(:group)
+
+ expect(paginator.paginate(1)).to eq([group])
+ end
+
+ context 'with a full first page' do
+ let!(:all_groups) { create_list(:group, 4) }
+ let!(:all_projects) { create_list(:project, 4) }
+
+ it 'knows the total count of the collection' do
+ expect(paginator.total_count).to eq(8)
+ end
+
+ it 'fills the first page with elements of the first collection' do
+ expect(paginator.paginate(1)).to eq(all_projects.take(3))
+ end
+
+ it 'fills the second page with a mixture of the first & second collections' do
+ first_collection_element = all_projects.last
+ second_collection_elements = all_groups.take(2)
+
+ expected_collection = [first_collection_element] + second_collection_elements
+
+ expect(paginator.paginate(2)).to eq(expected_collection)
+ end
+
+ it 'fills the last page with elements from the second collection' do
+ expected_collection = all_groups[-2..-1]
+
+ expect(paginator.paginate(3)).to eq(expected_collection)
+ end
+ end
+end
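
As the examples above show, pages are filled from the first collection until it is exhausted and then continue into the second, while total_count spans both. A toy version over plain arrays behaves the same way for these cases; the real class works on ActiveRecord relations and avoids loading both collections up front:

class ToyMultiCollectionPaginator
  def initialize(first, second, per_page:)
    @first = first.to_a
    @second = second.to_a
    @per_page = per_page
  end

  def total_count
    @first.size + @second.size
  end

  def paginate(page)
    offset = (page - 1) * @per_page
    (@first + @second).slice(offset, @per_page) || []
  end
end

paginator = ToyMultiCollectionPaginator.new([:p1, :p2, :p3, :p4], [:g1, :g2, :g3, :g4], per_page: 3)
paginator.paginate(1) # => [:p1, :p2, :p3]
paginator.paginate(2) # => [:p4, :g1, :g2]
paginator.paginate(3) # => [:g3, :g4]
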
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index db26e16e3b2..2f19fb7312d 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -4,7 +4,7 @@ describe Gitlab::OAuth::User do
let(:oauth_user) { described_class.new(auth_hash) }
let(:gl_user) { oauth_user.gl_user }
let(:uid) { 'my-uid' }
- let(:dn) { 'uid=user1,ou=People,dc=example' }
+ let(:dn) { 'uid=user1,ou=people,dc=example' }
let(:provider) { 'my-provider' }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash) }
let(:info_hash) do
@@ -662,4 +662,13 @@ describe Gitlab::OAuth::User do
end
end
end
+
+ describe '.find_by_uid_and_provider' do
+ let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
+
+ it 'normalizes extern_uid' do
+ allow(oauth_user.auth_hash).to receive(:uid).and_return('MY-UID')
+ expect(oauth_user.find_user).to eql gl_user
+ end
+ end
end
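
The new '.find_by_uid_and_provider' example relies on the uid being normalized before the lookup, so that 'MY-UID' still finds an identity that was stored as 'my-uid'. A rough sketch of such a case-insensitive lookup, assuming an Identity record with extern_uid and provider columns (the names here are illustrative, not a statement about the actual schema):

def find_by_uid_and_provider(uid, provider)
  normalized_uid = uid.to_s.downcase

  identity = Identity.find_by(extern_uid: normalized_uid, provider: provider)
  identity&.user
end
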
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 1f1c48ee9b5..0ae90069b7f 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -45,21 +45,16 @@ describe Gitlab::PathRegex do
Found new routes that could cause conflicts with existing namespaced routes
for groups or projects.
- Add <#{missing_words.join(', ')}> to `Gitlab::PathRegex::#{constant_name}
- to make sure no projects or namespaces can be created with those paths.
-
- To rename any existing records with those paths you can use the
- `Gitlab::Database::RenameReservedpathsMigration::<VERSION>.#{migration_helper}`
- migration helper.
-
- Make sure to make a note of the renamed records in the release blog post.
+ Nest <#{missing_words.join(', ')}> in a route containing `-`, that way
+ we know there will be no conflicts with groups or projects created with those
+ paths.
MISSING
end
if additional_words.any?
message += <<-ADDITIONAL
- Why are <#{additional_words.join(', ')}> in `#{constant_name}`?
+ Is <#{additional_words.join(', ')}> in `#{constant_name}` required?
If they are really required, update these specs to reflect that.
ADDITIONAL
@@ -68,14 +63,27 @@ describe Gitlab::PathRegex do
message
end
- let(:all_routes) do
+ let(:all_non_legacy_routes) do
route_set = Rails.application.routes
routes_collection = route_set.routes
routes_array = routes_collection.routes
- routes_array.map { |route| route.path.spec.to_s }
+
+ non_legacy_routes = routes_array.reject do |route|
+ route.name.to_s =~ /legacy_(\w*)_redirect/
+ end
+
+ non_deprecated_redirect_routes = non_legacy_routes.reject do |route|
+ app = route.app
+ # `app.app` is either another app, or `self`. We want to find the final app.
+ app = app.app while app.try(:app) && app.app != app
+
+ app.is_a?(ActionDispatch::Routing::PathRedirect) && app.block.include?('/-/')
+ end
+
+ non_deprecated_redirect_routes.map { |route| route.path.spec.to_s }
end
- let(:routes_without_format) { all_routes.map { |path| without_format(path) } }
+ let(:routes_without_format) { all_non_legacy_routes.map { |path| without_format(path) } }
# Routes not starting with `/:` or `/*`
# all routes not starting with a param
@@ -144,16 +152,7 @@ describe Gitlab::PathRegex do
let(:paths_after_group_id) do
group_routes.map do |route|
route.gsub(STARTING_WITH_GROUP, '').split('/').first
- end.uniq + ee_paths_after_group_id
- end
-
- let(:ee_paths_after_group_id) do
- %w(analytics
- ldap
- ldap_group_links
- notification_setting
- audit_events
- pipeline_quota hooks)
+ end.uniq
end
describe 'TOP_LEVEL_ROUTES' do
@@ -212,8 +211,6 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/')
- expect(subject).to match('group_members/')
- expect(subject).to match('subgroups/')
end
it 'is not case sensitive' do
@@ -245,8 +242,6 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/')
- expect(subject).to match('group_members/')
- expect(subject).to match('subgroups/')
end
end
@@ -267,8 +262,6 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/more/')
- expect(subject).to match('group_members/more/')
- expect(subject).to match('subgroups/more/')
end
end
end
@@ -290,9 +283,7 @@ describe Gitlab::PathRegex do
end
it 'rejects group routes' do
- expect(subject).not_to match('root/activity/')
- expect(subject).not_to match('root/group_members/')
- expect(subject).not_to match('root/subgroups/')
+ expect(subject).not_to match('root/-/')
end
end
@@ -312,9 +303,7 @@ describe Gitlab::PathRegex do
end
it 'rejects group routes' do
- expect(subject).not_to match('root/activity/more/')
- expect(subject).not_to match('root/group_members/more/')
- expect(subject).not_to match('root/subgroups/more/')
+ expect(subject).not_to match('root/-/')
end
end
end
@@ -347,9 +336,7 @@ describe Gitlab::PathRegex do
end
it 'accepts group routes' do
- expect(subject).to match('activity/')
- expect(subject).to match('group_members/')
- expect(subject).to match('subgroups/')
+ expect(subject).to match('analytics/')
end
it 'is not case sensitive' do
@@ -380,9 +367,7 @@ describe Gitlab::PathRegex do
end
it 'accepts group routes' do
- expect(subject).to match('root/activity/')
- expect(subject).to match('root/group_members/')
- expect(subject).to match('root/subgroups/')
+ expect(subject).to match('root/analytics/')
end
it 'is not case sensitive' do
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 9c3e7d7e9ba..a424f0f5cfe 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -70,6 +70,15 @@ describe Gitlab::ProjectSearchResults do
subject { described_class.parse_search_result(search_result) }
+ it 'can correctly parse filenames including ":"' do
+ special_char_result = "\nmaster:testdata/project::function1.yaml-1----\nmaster:testdata/project::function1.yaml:2:test: data1\n"
+
+ blob = described_class.parse_search_result(special_char_result)
+
+ expect(blob.ref).to eq('master')
+ expect(blob.filename).to eq('testdata/project::function1.yaml')
+ end
+
it "returns a valid FoundBlob" do
is_expected.to be_an Gitlab::SearchResults::FoundBlob
expect(subject.id).to be_nil
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index d19bd611919..57b0ef8d1ad 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -4,9 +4,9 @@ describe Gitlab::ProjectTemplate do
describe '.all' do
it 'returns a all templates' do
expected = [
- described_class.new('rails', 'Ruby on Rails'),
- described_class.new('spring', 'Spring'),
- described_class.new('express', 'NodeJS Express')
+ described_class.new('rails', 'Ruby on Rails', 'Includes an MVC structure, .gitignore, Gemfile, and more great stuff', 'https://gitlab.com/gitlab-org/project-templates/rails'),
+ described_class.new('spring', 'Spring', 'Includes an MVC structure, .gitignore, Gemfile, and more great stuff', 'https://gitlab.com/gitlab-org/project-templates/spring'),
+ described_class.new('express', 'NodeJS Express', 'Includes an MVC structure, .gitignore, Gemfile, and more great stuff', 'https://gitlab.com/gitlab-org/project-templates/express')
]
expect(described_class.all).to be_an(Array)
@@ -31,7 +31,7 @@ describe Gitlab::ProjectTemplate do
end
describe 'instance methods' do
- subject { described_class.new('phoenix', 'Phoenix Framework') }
+ subject { described_class.new('phoenix', 'Phoenix Framework', 'Phoenix description', 'link-to-template') }
it { is_expected.to respond_to(:logo, :file, :archive_path) }
end
diff --git a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
new file mode 100644
index 00000000000..8b58f0b3725
--- /dev/null
+++ b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
@@ -0,0 +1,81 @@
+require 'spec_helper'
+
+describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
+ subject { described_class }
+
+ shared_examples 'arg line with invalid parameters' do
+ it 'returns nil' do
+ expect(subject.new(invalid_arg).execute).to eq(nil)
+ end
+ end
+
+ shared_examples 'arg line with valid parameters' do
+ it 'returns a time and date array' do
+ expect(subject.new(valid_arg).execute).to eq(expected_response)
+ end
+ end
+
+ describe '#execute' do
+ context 'invalid parameter in arg line' do
+ context 'empty arg line' do
+ it_behaves_like 'arg line with invalid parameters' do
+ let(:invalid_arg) { '' }
+ end
+ end
+
+ context 'future date in arg line' do
+ it_behaves_like 'arg line with invalid parameters' do
+ let(:invalid_arg) { '10m 6023-02-02' }
+ end
+ end
+
+ context 'unparseable date (invalid mix of delimiters)' do
+ it_behaves_like 'arg line with invalid parameters' do
+ let(:invalid_arg) { '10m 2017.02-02' }
+ end
+ end
+
+ context 'trash in arg line' do
+ let(:invalid_arg) { 'dfjkghdskjfghdjskfgdfg' }
+
+ it 'returns nil as the time value' do
+ time_date_response = subject.new(invalid_arg).execute
+
+ expect(time_date_response).to be_an_instance_of(Array)
+ expect(time_date_response.first).to eq(nil)
+ end
+ end
+ end
+
+ context 'only time present in arg line' do
+ it_behaves_like 'arg line with valid parameters' do
+ let(:valid_arg) { '2m 3m 5m 1h' }
+ let(:time) { Gitlab::TimeTrackingFormatter.parse(valid_arg) }
+ let(:date) { DateTime.now.to_date }
+ let(:expected_response) { [time, date] }
+ end
+ end
+
+ context 'simple time with date in arg line' do
+ it_behaves_like 'arg line with valid parameters' do
+ let(:raw_time) { '10m' }
+ let(:raw_date) { '2016-02-02' }
+ let(:valid_arg) { "#{raw_time} #{raw_date}" }
+ let(:date) { Date.parse(raw_date) }
+ let(:time) { Gitlab::TimeTrackingFormatter.parse(raw_time) }
+ let(:expected_response) { [time, date] }
+ end
+ end
+
+ context 'composite time with date in arg line' do
+ it_behaves_like 'arg line with valid parameters' do
+ let(:raw_time) { '2m 10m 1h 3d' }
+ let(:raw_date) { '2016/02/02' }
+ let(:valid_arg) { "#{raw_time} #{raw_date}" }
+ let(:date) { Date.parse(raw_date) }
+ let(:time) { Gitlab::TimeTrackingFormatter.parse(raw_time) }
+ let(:expected_response) { [time, date] }
+ end
+ end
+ end
+end
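
These examples pin down the contract: the argument line is a time expression optionally followed by a single date, the date must use consistent delimiters and must not lie in the future, and an omitted date defaults to today. A simplified sketch of that separation; the regex and method name are illustrative only, and the real class performs more validation than shown here:

require 'date'

def separate_time_and_date(arg_line, time_parser:)
  tokens = arg_line.split
  return nil if tokens.empty?

  date = Date.today
  if tokens.last =~ %r{\A\d{4}([/-])\d{1,2}\1\d{1,2}\z} # consistent delimiters only
    date = Date.parse(tokens.pop.tr('/', '-'))
    return nil if date > Date.today # future dates are rejected
  end

  [time_parser.call(tokens.join(' ')), date]
end

separate_time_and_date('2m 10m 1h 3d 2016/02/02', time_parser: ->(raw) { raw })
# => ["2m 10m 1h 3d", #<Date: 2016-02-02 ...>]
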
diff --git a/spec/lib/gitlab/saml/auth_hash_spec.rb b/spec/lib/gitlab/saml/auth_hash_spec.rb
new file mode 100644
index 00000000000..a555935aea3
--- /dev/null
+++ b/spec/lib/gitlab/saml/auth_hash_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::Saml::AuthHash do
+ include LoginHelpers
+
+ let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers) } }
+ subject(:saml_auth_hash) { described_class.new(omniauth_auth_hash) }
+
+ let(:info_hash) do
+ {
+ name: 'John',
+ email: 'john@mail.com'
+ }
+ end
+
+ let(:omniauth_auth_hash) do
+ OmniAuth::AuthHash.new(uid: 'my-uid',
+ provider: 'saml',
+ info: info_hash,
+ extra: { raw_info: OneLogin::RubySaml::Attributes.new(raw_info_attr) } )
+ end
+
+ before do
+ stub_saml_group_config(%w(Developers Freelancers Designers))
+ end
+
+ describe '#groups' do
+ it 'returns array of groups' do
+ expect(saml_auth_hash.groups).to eq(%w(Developers Freelancers))
+ end
+
+ context 'raw info hash attributes empty' do
+ let(:raw_info_attr) { {} }
+
+ it 'returns an empty array' do
+ expect(saml_auth_hash.groups).to be_a(Array)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/saml/user_spec.rb b/spec/lib/gitlab/saml/user_spec.rb
index 59923bfb14d..1765980e977 100644
--- a/spec/lib/gitlab/saml/user_spec.rb
+++ b/spec/lib/gitlab/saml/user_spec.rb
@@ -2,13 +2,15 @@ require 'spec_helper'
describe Gitlab::Saml::User do
include LdapHelpers
+ include LoginHelpers
let(:saml_user) { described_class.new(auth_hash) }
let(:gl_user) { saml_user.gl_user }
let(:uid) { 'my-uid' }
- let(:dn) { 'uid=user1,ou=People,dc=example' }
+ let(:dn) { 'uid=user1,ou=people,dc=example' }
let(:provider) { 'saml' }
- let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new({ 'groups' => %w(Developers Freelancers Designers) }) }) }
+ let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers Designers) } }
+ let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new(raw_info_attr) }) }
let(:info_hash) do
{
name: 'John',
@@ -18,22 +20,6 @@ describe Gitlab::Saml::User do
let(:ldap_user) { Gitlab::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
describe '#save' do
- def stub_omniauth_config(messages)
- allow(Gitlab.config.omniauth).to receive_messages(messages)
- end
-
- def stub_ldap_config(messages)
- allow(Gitlab::LDAP::Config).to receive_messages(messages)
- end
-
- def stub_basic_saml_config
- allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', args: {} } })
- end
-
- def stub_saml_group_config(groups)
- allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } })
- end
-
before do
stub_basic_saml_config
end
@@ -402,4 +388,16 @@ describe Gitlab::Saml::User do
end
end
end
+
+ describe '#find_user' do
+ context 'raw info hash attributes empty' do
+ let(:raw_info_attr) { {} }
+
+ it 'does not mark user as external' do
+ stub_saml_group_config(%w(Freelancers))
+
+ expect(saml_user.find_user.external).to be_falsy
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index 139afa22d01..2158b2837e2 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -156,7 +156,7 @@ describe Gitlab::Shell do
it_behaves_like '#add_repository'
end
- context 'without gitaly', skip_gitaly_mock: true do
+ context 'without gitaly', :skip_gitaly_mock do
it_behaves_like '#add_repository'
end
end
@@ -333,7 +333,7 @@ describe Gitlab::Shell do
end
end
- describe '#fetch_remote local', skip_gitaly_mock: true do
+ describe '#fetch_remote local', :skip_gitaly_mock do
it_should_behave_like 'fetch_remote', false
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
new file mode 100644
index 00000000000..8fdbbacd04d
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::MemoryKiller do
+ subject { described_class.new }
+ let(:pid) { 999 }
+
+ let(:worker) { double(:worker, class: 'TestWorker') }
+ let(:job) { { 'jid' => 123 } }
+ let(:queue) { 'test_queue' }
+
+ def run
+ thread = subject.call(worker, job, queue) { nil }
+ thread&.join
+ end
+
+ before do
+ allow(subject).to receive(:get_rss).and_return(10.kilobytes)
+ allow(subject).to receive(:pid).and_return(pid)
+ end
+
+ context 'when MAX_RSS is set to 0' do
+ before do
+ stub_const("#{described_class}::MAX_RSS", 0)
+ end
+
+ it 'does nothing' do
+ expect(subject).not_to receive(:sleep)
+
+ run
+ end
+ end
+
+ context 'when MAX_RSS is exceeded' do
+ before do
+ stub_const("#{described_class}::MAX_RSS", 5.kilobytes)
+ end
+
+ it 'sends the STP, TERM and KILL signals at expected times' do
+ expect(subject).to receive(:sleep).with(15 * 60).ordered
+ expect(Process).to receive(:kill).with('SIGSTP', pid).ordered
+
+ expect(subject).to receive(:sleep).with(30).ordered
+ expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
+
+ expect(subject).to receive(:sleep).with(10).ordered
+ expect(Process).to receive(:kill).with('SIGKILL', pid).ordered
+
+ run
+ end
+ end
+
+ context 'when MAX_RSS is not exceeded' do
+ before do
+ stub_const("#{described_class}::MAX_RSS", 15.kilobytes)
+ end
+
+ it 'does nothing' do
+ expect(subject).not_to receive(:sleep)
+
+ run
+ end
+ end
+end
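
The MAX_RSS contexts above are what make the behaviour testable: a limit of 0 disables the check entirely, and the staged shutdown only starts once the measured RSS exceeds the limit. A small sketch of that decision; reading RSS via ps is just one option and not necessarily how the middleware's get_rss is implemented:

# Resident set size of a process in kilobytes (`ps` prints RSS in KB on Linux/macOS).
def current_rss_kilobytes(pid)
  `ps -o rss= -p #{pid}`.to_i
end

def memory_limit_exceeded?(pid, max_rss_kilobytes)
  return false unless max_rss_kilobytes.positive? # MAX_RSS of 0 disables the killer

  current_rss_kilobytes(pid) > max_rss_kilobytes
end

memory_limit_exceeded?(Process.pid, 0) # => false, regardless of actual memory use
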
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index c2e77ef6b6c..884f27b212c 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -39,6 +39,18 @@ describe Gitlab::SidekiqStatus do
end
end
+ describe '.running?', :clean_gitlab_redis_shared_state do
+ it 'returns true if job is running' do
+ described_class.set('123')
+
+ expect(described_class.running?('123')).to be(true)
+ end
+
+ it 'returns false if job is not found' do
+ expect(described_class.running?('123')).to be(false)
+ end
+ end
+
describe '.num_running', :clean_gitlab_redis_shared_state do
it 'returns 0 if all jobs have been completed' do
expect(described_class.num_running(%w(123))).to eq(0)
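
Conceptually, .running? is just a presence check on per-jid bookkeeping that .set writes when a job starts. A toy Redis-backed version of that pair, with a made-up key prefix and expiry (the real class defines its own key scheme):

require 'redis'

SIDEKIQ_STATUS_PREFIX = 'toy-sidekiq-status'.freeze

def mark_running(redis, jid, expire_seconds: 30 * 60)
  redis.set("#{SIDEKIQ_STATUS_PREFIX}:#{jid}", '1', ex: expire_seconds)
end

def running?(redis, jid)
  !redis.get("#{SIDEKIQ_STATUS_PREFIX}:#{jid}").nil?
end

redis = Redis.new
mark_running(redis, '123')
running?(redis, '123') # => true
running?(redis, '456') # => false
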
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index 48d56628ed5..ef51e3cc8df 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -137,22 +137,22 @@ describe Gitlab::SQL::Pattern do
end
end
- describe '.to_fuzzy_arel' do
- subject(:to_fuzzy_arel) { Issue.to_fuzzy_arel(:title, query) }
+ describe '.fuzzy_arel_match' do
+ subject(:fuzzy_arel_match) { Issue.fuzzy_arel_match(:title, query) }
context 'with a word equal to 3 chars' do
let(:query) { 'foo' }
it 'returns a single ILIKE condition' do
- expect(to_fuzzy_arel.to_sql).to match(/title.*I?LIKE '\%foo\%'/)
+ expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE '\%foo\%'/)
end
end
context 'with a word shorter than 3 chars' do
let(:query) { 'fo' }
- it 'returns nil' do
- expect(to_fuzzy_arel).to be_nil
+ it 'returns a single equality condition' do
+ expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo'/)
end
end
@@ -160,7 +160,23 @@ describe Gitlab::SQL::Pattern do
let(:query) { 'foo baz' }
it 'returns a joining LIKE condition using a AND' do
- expect(to_fuzzy_arel.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%'/)
+ expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%'/)
+ end
+ end
+
+ context 'with two words both shorter than 3 chars' do
+ let(:query) { 'fo ba' }
+
+ it 'returns a single ILIKE condition' do
+ expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo ba'/)
+ end
+ end
+
+ context 'with two words, one shorter than 3 chars' do
+ let(:query) { 'foo ba' }
+
+ it 'returns a single ILIKE condition using the longer word' do
+ expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%'/)
end
end
@@ -168,7 +184,7 @@ describe Gitlab::SQL::Pattern do
let(:query) { 'foo "really bar" baz' }
it 'returns a joining LIKE condition using a AND' do
- expect(to_fuzzy_arel.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/)
+ expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/)
end
end
end
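
Taken together, the examples describe the matching rule: quoted phrases are kept intact, words of three or more characters become %word% ILIKE patterns joined with AND, and a query with no usable words falls back to matching the raw query as-is (which is why 'fo' and 'fo ba' no longer return nil). A standalone sketch of the pattern-building half, without Arel:

MIN_CHARS_FOR_PARTIAL_MATCHING = 3

def select_fuzzy_words(query)
  # keep "quoted phrases" together, then drop words that are too short
  words = query.scan(/"([^"]+)"|(\S+)/).flatten.compact
  words.select { |word| word.length >= MIN_CHARS_FOR_PARTIAL_MATCHING }
end

def fuzzy_patterns(query)
  words = select_fuzzy_words(query)
  return [query] if words.empty? # fall back to matching the query literally

  words.map { |word| "%#{word}%" }
end

fuzzy_patterns('foo "really bar" baz') # => ["%foo%", "%really bar%", "%baz%"]
fuzzy_patterns('fo ba')                # => ["fo ba"]
fuzzy_patterns('foo ba')               # => ["%foo%"]
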
diff --git a/spec/lib/gitlab/sql/union_spec.rb b/spec/lib/gitlab/sql/union_spec.rb
index 8026fba9f0a..fe6422c32b6 100644
--- a/spec/lib/gitlab/sql/union_spec.rb
+++ b/spec/lib/gitlab/sql/union_spec.rb
@@ -29,5 +29,12 @@ describe Gitlab::SQL::Union do
expect(union.to_sql).to include('UNION ALL')
end
+
+ it 'returns `NULL` if all relations are empty' do
+ empty_relation = User.none
+ union = described_class.new([empty_relation, empty_relation])
+
+ expect(union.to_sql).to eq('NULL')
+ end
end
end
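
The new example covers the degenerate case: when every relation is empty there is nothing to union, but the builder still has to emit something that is valid inside an IN (...) or FROM clause, hence the literal NULL. A rough sketch of that shape, assuming empty relations produce a blank to_sql:

def union_sql(relations)
  fragments = relations.map(&:to_sql).reject { |sql| sql.strip.empty? }

  return 'NULL' if fragments.empty? # still valid SQL when used as a subquery

  fragments.join("\nUNION ALL\n")
end
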
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index f18823b61ef..d9b3c2350b1 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -20,6 +20,22 @@ describe Gitlab::UrlBlocker do
expect(described_class.blocked_url?('https://gitlab.com:25/foo/foo.git')).to be true
end
+ it 'returns true for alternative version of 127.0.0.1 (0177.1)' do
+ expect(described_class.blocked_url?('https://0177.1:65535/foo/foo.git')).to be true
+ end
+
+ it 'returns true for alternative version of 127.0.0.1 (0x7f.1)' do
+ expect(described_class.blocked_url?('https://0x7f.1:65535/foo/foo.git')).to be true
+ end
+
+ it 'returns true for alternative version of 127.0.0.1 (2130706433)' do
+ expect(described_class.blocked_url?('https://2130706433:65535/foo/foo.git')).to be true
+ end
+
+ it 'returns true for alternative version of 127.0.0.1 (127.000.000.001)' do
+ expect(described_class.blocked_url?('https://127.000.000.001:65535/foo/foo.git')).to be true
+ end
+
it 'returns true for a non-alphanumeric hostname' do
stub_resolv
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 777e9c8e21d..b5f2a15ada3 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -60,7 +60,9 @@ describe Gitlab::UsageData do
deploy_keys
deployments
environments
- gcp_clusters
+ clusters
+ clusters_enabled
+ clusters_disabled
in_review_folder
groups
issues
@@ -101,7 +103,7 @@ describe Gitlab::UsageData do
subject { described_class.features_usage_data_ce }
it 'gathers feature usage data' do
- expect(subject[:signup]).to eq(current_application_settings.signup_enabled?)
+ expect(subject[:signup]).to eq(current_application_settings.allow_signup?)
expect(subject[:ldap]).to eq(Gitlab.config.ldap.enabled)
expect(subject[:gravatar]).to eq(current_application_settings.gravatar_enabled?)
expect(subject[:omniauth]).to eq(Gitlab.config.omniauth.enabled)
diff --git a/spec/lib/gitlab/utils/merge_hash_spec.rb b/spec/lib/gitlab/utils/merge_hash_spec.rb
new file mode 100644
index 00000000000..4fa7bb31301
--- /dev/null
+++ b/spec/lib/gitlab/utils/merge_hash_spec.rb
@@ -0,0 +1,33 @@
+require 'spec_helper'
+describe Gitlab::Utils::MergeHash do
+ describe '.crush' do
+ it 'flattens a hash into an array of its keys and values' do
+ input = { hello: "world", this: { crushes: ["an entire", "hash"] } }
+ expected_result = [:hello, "world", :this, :crushes, "an entire", "hash"]
+
+ expect(described_class.crush(input)).to eq(expected_result)
+ end
+ end
+
+ describe '.elements' do
+ it 'deep merges an array of elements' do
+ input = [{ hello: ["world"] },
+ { hello: "Everyone" },
+ { hello: { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzień dobry'] } },
+ "Goodbye", "Hallo"]
+ expected_output = [
+ {
+ hello:
+ [
+ "world",
+ "Everyone",
+ { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzień dobry'] }
+ ]
+ },
+ "Goodbye"
+ ]
+
+ expect(described_class.merge(input)).to eq(expected_output)
+ end
+ end
+end
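
The .crush example is precise enough to reconstruct the behaviour: hashes and arrays are recursively unrolled into a flat list of keys and values. A small recursive sketch that reproduces the expectation above (a hypothetical free-standing method, not the module itself):

def crush(item)
  case item
  when Hash
    item.flat_map { |key, value| [key, *crush(value)] }
  when Array
    item.flat_map { |element| crush(element) }
  else
    [item]
  end
end

crush(hello: "world", this: { crushes: ["an entire", "hash"] })
# => [:hello, "world", :this, :crushes, "an entire", "hash"]
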
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
new file mode 100644
index 00000000000..4a104ab6d97
--- /dev/null
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -0,0 +1,52 @@
+require 'spec_helper'
+
+describe Gitlab::Utils::StrongMemoize do
+ let(:klass) do
+ struct = Struct.new(:value) do
+ def method_name
+ strong_memoize(:method_name) do
+ trace << value
+ value
+ end
+ end
+
+ def trace
+ @trace ||= []
+ end
+ end
+
+ struct.include(described_class)
+ struct
+ end
+
+ subject(:object) { klass.new(value) }
+
+ shared_examples 'caching the value' do
+ it 'only calls the block once' do
+ value0 = object.method_name
+ value1 = object.method_name
+
+ expect(value0).to eq(value)
+ expect(value1).to eq(value)
+ expect(object.trace).to contain_exactly(value)
+ end
+
+ it 'returns and defines the instance variable for the exact value' do
+ returned_value = object.method_name
+ memoized_value = object.instance_variable_get(:@method_name)
+
+ expect(returned_value).to eql(value)
+ expect(memoized_value).to eql(value)
+ end
+ end
+
+ describe '#strong_memoize' do
+ [nil, false, true, 'value', 0, [0]].each do |value|
+ context "with value #{value}" do
+ let(:value) { value }
+
+ it_behaves_like 'caching the value'
+ end
+ end
+ end
+end
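
The interesting property exercised here is that falsey values (nil and false) are cached too, which a plain `@x ||= ...` would not do, and that the cache lives in an instance variable named after the key. A minimal sketch consistent with those expectations:

module StrongMemoizeSketch
  def strong_memoize(name)
    ivar = "@#{name}"

    if instance_variable_defined?(ivar)
      instance_variable_get(ivar)
    else
      instance_variable_set(ivar, yield) # caches nil and false as well
    end
  end
end
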
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 4dffe2bd82f..249c77dc636 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -41,7 +41,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_archive feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly workhorse_archive feature is disabled', :skip_gitaly_mock do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
@@ -66,12 +66,34 @@ describe Gitlab::Workhorse do
let(:diff_refs) { double(base_sha: "base", head_sha: "head") }
subject { described_class.send_git_patch(repository, diff_refs) }
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
+ context 'when Gitaly workhorse_send_git_patch feature is enabled' do
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
- expect(key).to eq("Gitlab-Workhorse-Send-Data")
- expect(command).to eq("git-format-patch")
- expect(params).to eq("RepoPath" => repository.path_to_repo, "ShaFrom" => "base", "ShaTo" => "head")
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("git-format-patch")
+ expect(params).to eq({
+ 'GitalyServer' => {
+ address: Gitlab::GitalyClient.address(project.repository_storage),
+ token: Gitlab::GitalyClient.token(project.repository_storage)
+ },
+ 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
+ repository: repository.gitaly_repository,
+ left_commit_id: 'base',
+ right_commit_id: 'head'
+ ).to_json
+ }.deep_stringify_keys)
+ end
+ end
+
+ context 'when Gitaly workhorse_send_git_patch feature is disabled', :skip_gitaly_mock do
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("git-format-patch")
+ expect(params).to eq("RepoPath" => repository.path_to_repo, "ShaFrom" => "base", "ShaTo" => "head")
+ end
end
end
@@ -115,14 +137,36 @@ describe Gitlab::Workhorse do
describe '.send_git_diff' do
let(:diff_refs) { double(base_sha: "base", head_sha: "head") }
- subject { described_class.send_git_patch(repository, diff_refs) }
+ subject { described_class.send_git_diff(repository, diff_refs) }
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
+ context 'when Gitaly workhorse_send_git_diff feature is enabled' do
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
- expect(key).to eq("Gitlab-Workhorse-Send-Data")
- expect(command).to eq("git-format-patch")
- expect(params).to eq("RepoPath" => repository.path_to_repo, "ShaFrom" => "base", "ShaTo" => "head")
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("git-diff")
+ expect(params).to eq({
+ 'GitalyServer' => {
+ address: Gitlab::GitalyClient.address(project.repository_storage),
+ token: Gitlab::GitalyClient.token(project.repository_storage)
+ },
+ 'RawDiffRequest' => Gitaly::RawDiffRequest.new(
+ repository: repository.gitaly_repository,
+ left_commit_id: 'base',
+ right_commit_id: 'head'
+ ).to_json
+ }.deep_stringify_keys)
+ end
+ end
+
+ context 'when Gitaly workhorse_send_git_diff feature is disabled', :skip_gitaly_mock do
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("git-diff")
+ expect(params).to eq("RepoPath" => repository.path_to_repo, "ShaFrom" => "base", "ShaTo" => "head")
+ end
end
end
@@ -224,7 +268,8 @@ describe Gitlab::Workhorse do
GL_ID: "user-#{user.id}",
GL_USERNAME: user.username,
GL_REPOSITORY: "project-#{project.id}",
- RepoPath: repo_path
+ RepoPath: repo_path,
+ ShowAllRefs: false
}
end
@@ -238,7 +283,8 @@ describe Gitlab::Workhorse do
GL_ID: "user-#{user.id}",
GL_USERNAME: user.username,
GL_REPOSITORY: "wiki-#{project.id}",
- RepoPath: repo_path
+ RepoPath: repo_path,
+ ShowAllRefs: false
}
end
@@ -280,6 +326,12 @@ describe Gitlab::Workhorse do
expect(subject).to include(gitaly_params)
end
+
+ context 'show_all_refs enabled' do
+ subject { described_class.git_http_ok(repository, false, user, action, show_all_refs: true) }
+
+ it { is_expected.to include(ShowAllRefs: true) }
+ end
end
context "when git_receive_pack action is passed" do
@@ -292,6 +344,12 @@ describe Gitlab::Workhorse do
let(:action) { 'info_refs' }
it { expect(subject).to include(gitaly_params) }
+
+ context 'show_all_refs enabled' do
+ subject { described_class.git_http_ok(repository, false, user, action, show_all_refs: true) }
+
+ it { is_expected.to include(ShowAllRefs: true) }
+ end
end
context 'when action passed is not supported by Gitaly' do
@@ -383,7 +441,7 @@ describe Gitlab::Workhorse do
end
end
- context 'when Gitaly workhorse_raw_show feature is disabled', skip_gitaly_mock: true do
+ context 'when Gitaly workhorse_raw_show feature is disabled', :skip_gitaly_mock do
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
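
Each of these examples decodes a Gitlab-Workhorse-Send-Data header into a command name plus a params hash. As a hedged illustration only: one plausible way to pack such an instruction is a command followed by Base64-encoded JSON, which is what decode_workhorse_header appears to undo; the authoritative format is defined by Workhorse and the spec helper, not by this sketch.

require 'base64'
require 'json'

def send_data_header(command, params)
  ['Gitlab-Workhorse-Send-Data', "#{command}:#{Base64.urlsafe_encode64(params.to_json)}"]
end

key, value = send_data_header('git-diff', 'RepoPath' => '/repos/foo.git', 'ShaFrom' => 'base', 'ShaTo' => 'head')
# key   => "Gitlab-Workhorse-Send-Data"
# value => "git-diff:eyJSZXBvUGF0aCI6..."
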
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index acc5bd1da35..fac23dce44d 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -69,7 +69,7 @@ describe GoogleApi::CloudPlatform::Client do
let(:cluster_name) { 'test-cluster' }
let(:cluster_size) { 1 }
- let(:machine_type) { 'n1-standard-4' }
+ let(:machine_type) { 'n1-standard-2' }
let(:operation) { double }
before do
diff --git a/spec/lib/milestone_array_spec.rb b/spec/lib/milestone_array_spec.rb
new file mode 100644
index 00000000000..df91677b925
--- /dev/null
+++ b/spec/lib/milestone_array_spec.rb
@@ -0,0 +1,34 @@
+require 'spec_helper'
+
+describe MilestoneArray do
+ let(:object1) { instance_double("BirdMilestone", due_date: Time.now, start_date: Time.now - 15.days, title: 'v2.0') }
+ let(:object2) { instance_double("CatMilestone", due_date: Time.now - 1.day, start_date: nil, title: 'v1.0') }
+ let(:object3) { instance_double("DogMilestone", due_date: nil, start_date: Time.now - 30.days, title: 'v3.0') }
+ let(:array) { [object1, object3, object2] }
+
+ describe '#sort' do
+ it 'reorders array with due date in ascending order with nulls last' do
+ expect(described_class.sort(array, 'due_date_asc')).to eq([object2, object1, object3])
+ end
+
+ it 'reorders array with due date in descending order with nulls last' do
+ expect(described_class.sort(array, 'due_date_desc')).to eq([object1, object2, object3])
+ end
+
+ it 'reorders array with start date in ascending order with nulls last' do
+ expect(described_class.sort(array, 'start_date_asc')).to eq([object3, object1, object2])
+ end
+
+ it 'reorders array with start date in descending order with nulls last' do
+ expect(described_class.sort(array, 'start_date_desc')).to eq([object1, object3, object2])
+ end
+
+ it 'reorders array with title in ascending order' do
+ expect(described_class.sort(array, 'name_asc')).to eq([object2, object1, object3])
+ end
+
+ it 'reorders array with title in descending order' do
+ expect(described_class.sort(array, 'name_desc')).to eq([object3, object1, object2])
+ end
+ end
+end
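
The sorting here happens in plain Ruby rather than SQL, so records without a due date or start date cannot rely on NULLS LAST; they have to be partitioned off and appended after the sorted remainder. A generic sketch of that nulls-last ordering:

require 'date'

def sort_with_nils_last(items, attribute, direction: :asc)
  with_value, without_value = items.partition { |item| item.public_send(attribute) }

  sorted = with_value.sort_by { |item| item.public_send(attribute) }
  sorted.reverse! if direction == :desc

  sorted + without_value
end

Milestone = Struct.new(:title, :due_date)
milestones = [Milestone.new('v1.0', nil), Milestone.new('v2.0', Date.new(2017, 6, 1))]
sort_with_nils_last(milestones, :due_date) # => v2.0 first, the undated v1.0 last
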
diff --git a/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb b/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb
index b4b83b70d1c..a0fb86345f3 100644
--- a/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb
+++ b/spec/lib/system_check/app/git_user_default_ssh_config_check_spec.rb
@@ -39,14 +39,6 @@ describe SystemCheck::App::GitUserDefaultSSHConfigCheck do
it { is_expected.to eq(expected_result) }
end
-
- it 'skips GitLab read-only instances' do
- stub_user
- stub_home_dir
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
-
- is_expected.to be_truthy
- end
end
describe '#check?' do