author     Douwe Maan <douwe@gitlab.com>               2016-12-02 04:32:23 +0000
committer  Alejandro Rodríguez <alejorro70@gmail.com>  2016-12-02 13:24:43 -0300
commit     c16a5701e90d43216c57bfea6b174f163662d0cf (patch)
tree       d2155a13e2d8aea866badf6d9ab4593036d85ced /spec
parent     0eecc9419c2a55f5e35d8e6c08820fbfd19ef178 (diff)
download   gitlab-ce-c16a5701e90d43216c57bfea6b174f163662d0cf.tar.gz
Merge branch 'process-commit-worker-improvements' into 'master'
Pass commit data to ProcessCommitWorker

This changes `ProcessCommitWorker` so that it takes a Hash containing commit data instead of a commit SHA. This means the worker doesn't have to access Git just to process a commit message (and other data it may use). This in turn should solve the problem of ending up with 15,000-something jobs in the `process_commit` queue that take forever to process.

See merge request !7744
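Every spec change below follows from that one signature change. Using the fixtures the specs themselves set up (`project`, `user`, and a `commit`), the before/after call shape is:

# Before: the worker received only a SHA and had to hit Git to load the
# commit before it could process its message.
ProcessCommitWorker.new.perform(project.id, user.id, commit.sha)

# After: the worker receives a serialized Hash of commit data (id, message,
# author/committer fields, dates) and rebuilds a Commit object from it, so no
# Git access is needed per job.
ProcessCommitWorker.new.perform(project.id, user.id, commit.to_hash)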
Diffstat (limited to 'spec')
-rw-r--r--  spec/lib/gitlab/cycle_analytics/events_spec.rb                 2
-rw-r--r--  spec/migrations/migrate_process_commit_worker_jobs_spec.rb   194
-rw-r--r--  spec/models/commit_spec.rb                                     17
-rw-r--r--  spec/requests/projects/cycle_analytics_events_spec.rb           2
-rw-r--r--  spec/services/git_push_service_spec.rb                          6
-rw-r--r--  spec/spec_helper.rb                                              4
-rw-r--r--  spec/workers/process_commit_worker_spec.rb                      29
7 files changed, 239 insertions, 15 deletions
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index 9aeaa6b3ee8..6062e7af4f5 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -321,6 +321,6 @@ describe Gitlab::CycleAnalytics::Events do
context.update(milestone: milestone)
mr = create_merge_request_closing_issue(context)
- ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.sha)
+ ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
end
end
diff --git a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
new file mode 100644
index 00000000000..52428547a9f
--- /dev/null
+++ b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
@@ -0,0 +1,194 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20161124141322_migrate_process_commit_worker_jobs.rb')
+
+describe MigrateProcessCommitWorkerJobs do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:commit) { project.commit.raw.raw_commit }
+
+ describe 'Project' do
+ describe 'find_including_path' do
+ it 'returns Project instances' do
+ expect(described_class::Project.find_including_path(project.id)).
+ to be_an_instance_of(described_class::Project)
+ end
+
+ it 'selects the full path for every Project' do
+ migration_project = described_class::Project.
+ find_including_path(project.id)
+
+ expect(migration_project[:path_with_namespace]).
+ to eq(project.path_with_namespace)
+ end
+ end
+
+ describe '#repository_storage_path' do
+ it 'returns the storage path for the repository' do
+ migration_project = described_class::Project.
+ find_including_path(project.id)
+
+ expect(File.directory?(migration_project.repository_storage_path)).
+ to eq(true)
+ end
+ end
+
+ describe '#repository_path' do
+ it 'returns the path to the repository' do
+ migration_project = described_class::Project.
+ find_including_path(project.id)
+
+ expect(File.directory?(migration_project.repository_path)).to eq(true)
+ end
+ end
+
+ describe '#repository' do
+ it 'returns a Rugged::Repository' do
+ migration_project = described_class::Project.
+ find_including_path(project.id)
+
+ expect(migration_project.repository).
+ to be_an_instance_of(Rugged::Repository)
+ end
+ end
+ end
+
+ describe '#up', :redis do
+ let(:migration) { described_class.new }
+
+ def job_count
+ Sidekiq.redis { |r| r.llen('queue:process_commit') }
+ end
+
+ before do
+ Sidekiq.redis do |redis|
+ job = JSON.dump(args: [project.id, user.id, commit.oid])
+ redis.lpush('queue:process_commit', job)
+ end
+ end
+
+ it 'skips jobs using a project that no longer exists' do
+ allow(described_class::Project).to receive(:find_including_path).
+ with(project.id).
+ and_return(nil)
+
+ migration.up
+
+ expect(job_count).to eq(0)
+ end
+
+ it 'skips jobs using commits that no longer exist' do
+ allow_any_instance_of(Rugged::Repository).to receive(:lookup).
+ with(commit.oid).
+ and_raise(Rugged::OdbError)
+
+ migration.up
+
+ expect(job_count).to eq(0)
+ end
+
+ it 'inserts migrated jobs back into the queue' do
+ migration.up
+
+ expect(job_count).to eq(1)
+ end
+
+ context 'a migrated job' do
+ let(:job) do
+ migration.up
+
+ JSON.load(Sidekiq.redis { |r| r.lpop('queue:process_commit') })
+ end
+
+ let(:commit_hash) do
+ job['args'][2]
+ end
+
+ it 'includes the project ID' do
+ expect(job['args'][0]).to eq(project.id)
+ end
+
+ it 'includes the user ID' do
+ expect(job['args'][1]).to eq(user.id)
+ end
+
+ it 'includes the commit ID' do
+ expect(commit_hash['id']).to eq(commit.oid)
+ end
+
+ it 'includes the commit message' do
+ expect(commit_hash['message']).to eq(commit.message)
+ end
+
+ it 'includes the parent IDs' do
+ expect(commit_hash['parent_ids']).to eq(commit.parent_ids)
+ end
+
+ it 'includes the author date' do
+ expect(commit_hash['authored_date']).to eq(commit.author[:time].to_s)
+ end
+
+ it 'includes the author name' do
+ expect(commit_hash['author_name']).to eq(commit.author[:name])
+ end
+
+ it 'includes the author email' do
+ expect(commit_hash['author_email']).to eq(commit.author[:email])
+ end
+
+ it 'includes the commit date' do
+ expect(commit_hash['committed_date']).to eq(commit.committer[:time].to_s)
+ end
+
+ it 'includes the committer name' do
+ expect(commit_hash['committer_name']).to eq(commit.committer[:name])
+ end
+
+ it 'includes the committer email' do
+ expect(commit_hash['committer_email']).to eq(commit.committer[:email])
+ end
+ end
+ end
+
+ describe '#down', :redis do
+ let(:migration) { described_class.new }
+
+ def job_count
+ Sidekiq.redis { |r| r.llen('queue:process_commit') }
+ end
+
+ before do
+ Sidekiq.redis do |redis|
+ job = JSON.dump(args: [project.id, user.id, commit.oid])
+ redis.lpush('queue:process_commit', job)
+
+ migration.up
+ end
+ end
+
+ it 'pushes migrated jobs back into the queue' do
+ migration.down
+
+ expect(job_count).to eq(1)
+ end
+
+ context 'a migrated job' do
+ let(:job) do
+ migration.down
+
+ JSON.load(Sidekiq.redis { |r| r.lpop('queue:process_commit') })
+ end
+
+ it 'includes the project ID' do
+ expect(job['args'][0]).to eq(project.id)
+ end
+
+ it 'includes the user ID' do
+ expect(job['args'][1]).to eq(user.id)
+ end
+
+ it 'includes the commit SHA' do
+ expect(job['args'][2]).to eq(commit.oid)
+ end
+ end
+ end
+end
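Read together, these expectations pin down the transformation the migration applies to each queued `process_commit` job: pop it, drop it if its project or commit no longer exists, otherwise replace the SHA argument with a serialized commit Hash, and on rollback swap the Hash back for its `id`. A minimal sketch of the Hash-building step, with field names taken from the spec above (the real migration's structure and helper names will differ):

# Sketch only: serialize a Rugged commit into the Hash the new worker expects.
def commit_to_hash(commit)
  {
    id:              commit.oid,
    message:         commit.message,
    parent_ids:      commit.parent_ids,
    authored_date:   commit.author[:time].to_s,
    author_name:     commit.author[:name],
    author_email:    commit.author[:email],
    committed_date:  commit.committer[:time].to_s,
    committer_name:  commit.committer[:name],
    committer_email: commit.committer[:email]
  }
end

# up:   args [project_id, user_id, sha]  -> [project_id, user_id, commit_to_hash(commit)]
# down: args [project_id, user_id, hash] -> [project_id, user_id, hash['id']]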
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index e3bb3482d67..7194c20d3bf 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -302,4 +302,21 @@ eos
expect(commit.uri_type('this/path/doesnt/exist')).to be_nil
end
end
+
+ describe '.from_hash' do
+ let(:new_commit) { described_class.from_hash(commit.to_hash, project) }
+
+ it 'returns a Commit' do
+ expect(new_commit).to be_an_instance_of(described_class)
+ end
+
+ it 'wraps a Gitlab::Git::Commit' do
+ expect(new_commit.raw).to be_an_instance_of(Gitlab::Git::Commit)
+ end
+
+ it 'stores the correct commit fields' do
+ expect(new_commit.id).to eq(commit.id)
+ expect(new_commit.message).to eq(commit.message)
+ end
+ end
end
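The `.from_hash` constructor exercised here is the counterpart of `Commit#to_hash`: it rebuilds a `Commit` from plain data without touching the repository. A hedged sketch of such a method, wrapping `Gitlab::Git::Commit` as the second example requires (not necessarily the exact GitLab implementation):

# Sketch: rebuild a Commit from the Hash produced by Commit#to_hash.
def self.from_hash(hash, project)
  new(Gitlab::Git::Commit.new(hash), project)
end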
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 5c90fd9bad9..f5e0fdcda2d 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -135,6 +135,6 @@ describe 'cycle analytics events' do
merge_merge_requests_closing_issue(issue)
- ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.sha)
+ ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
end
end
diff --git a/spec/services/git_push_service_spec.rb b/spec/services/git_push_service_spec.rb
index 43dff2da5c0..9e3a0f1c4c9 100644
--- a/spec/services/git_push_service_spec.rb
+++ b/spec/services/git_push_service_spec.rb
@@ -263,7 +263,7 @@ describe GitPushService, services: true do
author_email: commit_author.email
)
- allow_any_instance_of(ProcessCommitWorker).to receive(:find_commit).
+ allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit).
and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
@@ -321,7 +321,7 @@ describe GitPushService, services: true do
committed_date: commit_time
)
- allow_any_instance_of(ProcessCommitWorker).to receive(:find_commit).
+ allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit).
and_return(commit)
allow(project.repository).to receive(:commits_between).and_return([commit])
@@ -360,7 +360,7 @@ describe GitPushService, services: true do
allow(project.repository).to receive(:commits_between).
and_return([closing_commit])
- allow_any_instance_of(ProcessCommitWorker).to receive(:find_commit).
+ allow_any_instance_of(ProcessCommitWorker).to receive(:build_commit).
and_return(closing_commit)
project.team << [commit_author, :master]
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index bead1a006d1..243200486f8 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -55,8 +55,12 @@ RSpec.configure do |config|
config.around(:each, :redis) do |example|
Gitlab::Redis.with(&:flushall)
+ Sidekiq.redis(&:flushall)
+
example.run
+
Gitlab::Redis.with(&:flushall)
+ Sidekiq.redis(&:flushall)
end
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 3e4fee42240..75c7fc1efd2 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -11,31 +11,25 @@ describe ProcessCommitWorker do
it 'does not process the commit when the project does not exist' do
expect(worker).not_to receive(:close_issues)
- worker.perform(-1, user.id, commit.id)
+ worker.perform(-1, user.id, commit.to_hash)
end
it 'does not process the commit when the user does not exist' do
expect(worker).not_to receive(:close_issues)
- worker.perform(project.id, -1, commit.id)
- end
-
- it 'does not process the commit when the commit no longer exists' do
- expect(worker).not_to receive(:close_issues)
-
- worker.perform(project.id, user.id, 'this-should-does-not-exist')
+ worker.perform(project.id, -1, commit.to_hash)
end
it 'processes the commit message' do
expect(worker).to receive(:process_commit_message).and_call_original
- worker.perform(project.id, user.id, commit.id)
+ worker.perform(project.id, user.id, commit.to_hash)
end
it 'updates the issue metrics' do
expect(worker).to receive(:update_issue_metrics).and_call_original
- worker.perform(project.id, user.id, commit.id)
+ worker.perform(project.id, user.id, commit.to_hash)
end
end
@@ -106,4 +100,19 @@ describe ProcessCommitWorker do
expect(metric.first_mentioned_in_commit_at).to eq(commit.committed_date)
end
end
+
+ describe '#build_commit' do
+ it 'returns a Commit' do
+ commit = worker.build_commit(project, id: '123')
+
+ expect(commit).to be_an_instance_of(Commit)
+ end
+
+ it 'parses date strings into Time instances' do
+ commit = worker.
+ build_commit(project, id: '123', authored_date: Time.now.to_s)
+
+ expect(commit.authored_date).to be_an_instance_of(Time)
+ end
+ end
end
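The two new `#build_commit` examples fix the helper's contract: build a `Commit` straight from the job's Hash and coerce the date strings produced by Sidekiq's JSON serialization back into `Time` objects. A sketch under those assumptions (the real worker may differ in detail):

require 'time'

def build_commit(project, hash)
  # Sidekiq stores job arguments as JSON, so Time values arrive as strings.
  hash.each do |key, value|
    hash[key] = Time.parse(value) if key.to_s.end_with?('_date') && value.is_a?(String)
  end

  Commit.from_hash(hash, project)
end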